diff --git a/epde/__init__.py b/epde/__init__.py index 2179f5c..3141f65 100644 --- a/epde/__init__.py +++ b/epde/__init__.py @@ -1,4 +1,4 @@ -from .interface.interface import EpdeSearch +from .interface.interface import EpdeSearch, EpdeMultisample from .interface.logger import Logger from .interface.equation_translator import translate_equation diff --git a/epde/cache/cache.py b/epde/cache/cache.py index fa9afd9..4bf4189 100644 --- a/epde/cache/cache.py +++ b/epde/cache/cache.py @@ -15,20 +15,6 @@ ''' -""" -This is an example of Google style. - -Args: - param1: This is the first param. - param2: This is a second param. - -Returns: - This is a description of what is returned. - -Raises: - KeyError: Raises an exception. -""" - # !/usr/bin/env python3 # -*- coding: utf-8 -*- @@ -37,13 +23,19 @@ import psutil from functools import partial -from typing import Union, Callable +from typing import Union, Callable, List +import torch from copy import deepcopy -from collections import OrderedDict, Iterable +from collections import OrderedDict +try: + from collections.abc import Iterable +except ImportError: + from collections import Iterable -def upload_simple_tokens(labels, cache, tensors, grid_setting=False): +def upload_simple_tokens(labels, cache, tensors, deriv_codes: List = None, + grid_setting=False): """ Uploads basic factors into the cache, with their values stored as n-dimensional numpy arrays @@ -56,12 +48,18 @@ def upload_simple_tokens(labels, cache, tensors, grid_setting=False): Returns: None """ + if deriv_codes is not None and len(deriv_codes) != len(labels): + print(deriv_codes, labels) + raise ValueError(f'Incorrect number of deriv codes passed: expected {len(labels)}, got {len(deriv_codes)}.') + for idx, label in enumerate(labels): if grid_setting: label_completed = label + deriv_code = None else: label_completed = (label, (1.0,)) - cache.add(label_completed, tensors[idx]) + deriv_code = None if deriv_codes is None else deriv_codes[idx] + cache.add(label_completed, tensors[idx], deriv_code = deriv_code) cache.add_base_matrix(label_completed) @@ -115,6 +113,15 @@ def prepare_var_tensor(var_tensor, derivs_tensor, time_axis): source=time_axis, destination=0) return result +def switch_format(inp: Union[torch.Tensor, np.ndarray], device = 'cpu'): + if isinstance(inp, np.ndarray): + return torch.from_numpy(inp).to(device) + elif isinstance(inp, torch.Tensor): + if device == 'cpu': + return inp.detach().numpy() + else: + return inp.detach().cpu().numpy() + def download_variable(var_filename, deriv_filename, time_axis): """ @@ -135,19 +142,26 @@ class Cache(object): """Class for keeping values of terms/factors of equations. Args: - max_allowed_tensors (`int`): limitation on the number of allowed tensors to load into rhe cache. + max_allowed_tensors (`int`): limitation on the number of allowed tensors to load into the cache. memory_default (`dict`): key - name of tensor (tuple - (name_of_term, params)), value - derivative. Objects unchanged after an evolutionary step memory_normalized (`dict`): key - name of tensor (tuple - (name_of_term, params)), value - derivative. Objects with normalized values - memory_structural (`dict`): key - name of tensor (tuple - (name_of_term, params)), value - derivative. NOT USED ДОПИСАТЬ ПРОСМОТРЯ КОД + memory_structural (`dict`): key - name of tensor (tuple - (name_of_term, params)), value - derivative. NOT USED; TO BE DOCUMENTED AFTER REVIEWING THE CODE """
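# Usage sketch (illustrative, assuming only the helpers defined above): the cache now keeps
# parallel 'numpy' and 'torch' storages, with switch_format converting between the two
# formats on demand.
#
#   cache = Cache(device='cpu')
#   field = np.zeros((100, 100))
#   cache.add(('u', (1.0,)), field)                      # stored under the 'numpy' sub-cache
#   u_torch = cache.get(('u', (1.0,)), torch_mode=True)  # lazily converted to a torch.Tensor
#   u_numpy = switch_format(u_torch)                     # back to an np.ndarray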
- def __init__(self): + def __init__(self, device = 'cpu'): + self._device = device self.max_allowed_tensors = None - self.memory_default = dict() - self.memory_normalized = dict() - self.memory_structural = dict() + + self.memory_default = {'torch' : dict(), 'numpy' : dict()} # separate sub-caches for torch tensors and numpy arrays + self.memory_normalized = {'torch' : dict(), 'numpy' : dict()} + self.memory_structural = {'torch' : dict(), 'numpy' : dict()} + self.memory_anns = dict() + self.mem_prop_set = False self.base_tensors = [] # storage of non-normalized tensors, that will not be affected by change of variables self.structural_and_base_merged = dict() + self._deriv_codes = [] # Elements of this list must be tuples with the first element - + # deriv code ([var], [term]) like ([1], [0]) for dy/dt in LV, and the second - cache label in + # standard form ('dy/dx1', (1.0,)) def attrs_from_dict(self, attributes, except_attrs: dict = {}): except_attrs['obj_type'] = None @@ -158,14 +172,13 @@ def attrs_from_dict(self, attributes, except_attrs: dict = {}): self.__dict__[key] = elem def use_structural(self, use_base_data=True, label=None, replacing_data=None): - # print(f'Setting structural data for {label}, for it: {use_base_data} - use_base_data') assert use_base_data or replacing_data is not None, 'Structural data must be declared with base data or by additional tensors.' + # print('Called `use_structural`, expect caches to alter') if label is None: - # self.structural_used = True if use_base_data: - self.memory_structural = {key: val for key, val in self.memory_default.items()} + self.memory_structural['numpy'] = {key: val for key, val in self.memory_default['numpy'].items()} try: - for key in self.memory_structural.keys(): + for key in self.memory_structural['numpy'].keys(): self.structural_and_base_merged[key] = True except AttributeError as e: print(f"Error in class Cache {e}") else: if type(replacing_data) != dict: raise TypeError('Replacing data shall be passed as a dict of format {tuple memory key: np.ndarray}') if np.any([type(entry) != np.ndarray for entry in replacing_data.values()]): raise TypeError('Replacing data shall be passed as a dict of format {tuple memory key: np.ndarray}') - if replacing_data.keys() != self.memory_default.keys(): - print(replacing_data.keys(), self.memory_default.keys()) + if replacing_data.keys() != self.memory_default['numpy'].keys(): + print(replacing_data.keys(), self.memory_default['numpy'].keys()) raise ValueError('Labels for the new structural data do not match the baseline data ones.') - if np.any([entry.shape != self.memory_default[label].shape for label, entry in replacing_data.items()]): - print([(entry.shape, self.memory_default[label].shape) for label, entry in replacing_data.items()]) + if np.any([entry.shape != self.memory_default['numpy'][label].shape for label, entry in replacing_data.items()]): + print([(entry.shape, self.memory_default['numpy'][label].shape) for label, entry in replacing_data.items()]) raise ValueError('Shapes of tensors in new structural data do not match their counterparts in the base data') - for key in self.memory_default.keys(): + for key in self.memory_default['numpy'].keys(): self.structural_and_base_merged[key] = False self.memory_structural['numpy'] = replacing_data - elif type(label) == tuple: + elif isinstance(label, tuple): if use_base_data: + # print(self.memory_default['numpy'].keys()) + replacing_data = 
self.memory_default['numpy'][label] self.structural_and_base_merged[label] = True - if label not in self.memory_default.keys(): + if label not in self.memory_default['numpy'].keys(): self.add(label=label, tensor=replacing_data) else: - # print(self.structural_and_base_merged) + if replacing_data is None: + raise ValueError('Got no replacing data when it was expected!') self.structural_and_base_merged[label] = False if type(replacing_data) != np.ndarray: raise TypeError('Replacing data with provided label shall be set with np.ndarray ') - if label in self.memory_default.keys(): - if replacing_data.shape != self.memory_default[label].shape: + if label in self.memory_default['numpy'].keys(): + if replacing_data.shape != self.memory_default['numpy'][label].shape: raise ValueError('Shapes of tensors in new structural data do not match their counterparts in the base data') - self.memory_structural[label] = replacing_data + self.memory_structural['numpy'][label] = replacing_data @property def g_func(self): # , g_func: Union[Callable, type(None)] = None - assert '0' in self.memory_default.keys() # Check if we are working with the grid cache - return self._g_func(self.get_all()[1]) + try: + assert '0' in self.memory_default['numpy'].keys() # Check if we are working with the grid cache + return self._g_func(self.get_all()[1]) + except TypeError: + assert isinstance(self._g_func, (np.ndarray, list)) + return self._g_func @g_func.setter - def g_func(self, function: Callable): + def g_func(self, function: Union[Callable, np.ndarray, list]): self._g_func = function def add_base_matrix(self, label): - assert label in self.memory_default.keys() + assert label in self.memory_default['numpy'].keys() self.base_tensors.append(label) def set_boundaries(self, boundary_width: Union[int, list, tuple]): """ Setting the number of unaccounted elements at the edges """ - assert '0' in self.memory_default.keys(), 'Boundaries should be specified for grid cache.' + assert '0' in self.memory_default['numpy'].keys(), 'Boundaries should be specified for grid cache.' shape = self.get('0')[1].shape if isinstance(boundary_width, int): if any([elem <= 2*boundary_width for elem in shape]): @@ -241,7 +261,7 @@ def memory_usage_properties(self, obj_test_case=None, mem_for_cache_frac=None, m None """ assert not (mem_for_cache_frac is None and mem_for_cache_abs is None), 'Available memory space not defined' - assert obj_test_case is not None or len(self.memory_default) > 0, 'Method needs sample of stored matrix to evaluate memory allocation' + assert obj_test_case is not None or len(self.memory_default['numpy']) > 0, 'Method needs sample of stored matrix to evaluate memory allocation' if mem_for_cache_abs is None: self.available_mem = mem_for_cache_frac / 100. 
* psutil.virtual_memory().total # Allocated memory for tensor storage, bytes else: @@ -249,94 +269,88 @@ def memory_usage_properties(self, obj_test_case=None, mem_for_cache_frac=None, m assert self.available_mem < psutil.virtual_memory().available - if len(self.memory_default) == 0: + if len(self.memory_default['numpy']) == 0: assert obj_test_case is not None self.max_allowed_tensors = int(np.floor(self.available_mem/obj_test_case.nbytes)/2) else: - self.max_allowed_tensors = int(np.floor(self.available_mem/self.memory_default[np.random.choice(list(self.memory_default.keys()))].nbytes)) + key = np.random.choice(list(self.memory_default['numpy'].keys())) + self.max_allowed_tensors = int(np.floor(self.available_mem/ + self.memory_default['numpy'][key].nbytes)) eps = 1e-7 if np.abs(self.available_mem) < eps: print('The cache can not contain any tensor even if the memory is entirely free (this message should be unreachable)') def clear(self, full=False): + self._deriv_codes = [] + print('Clearing cache') if full: del self.memory_default, self.memory_normalized, self.memory_structural, self.base_tensors - self.memory_default = dict() - self.memory_normalized = dict() - self.memory_structural = dict() + self.memory_default = {'torch' : dict(), 'numpy' : dict()} + self.memory_normalized = {'torch' : dict(), 'numpy' : dict()} + self.memory_structural = {'torch' : dict(), 'numpy' : dict()} self.base_tensors = [] else: - memory_new = dict() - memory_new_norm = dict() - memory_new_structural = dict() + new_memory_default = {'torch' : dict(), 'numpy' : dict()} + new_memory_normalized = {'torch' : dict(), 'numpy' : dict()} + new_memory_structural = {'torch' : dict(), 'numpy' : dict()} for key in self.base_tensors: - memory_new[key] = self.memory_default[key] - memory_new_norm[key] = self.memory_normalized[key] - memory_new_structural[key] = self.memory_structural[key] + new_memory_default['torch'][key] = self.get(key, False, False, None, True) + new_memory_default['numpy'][key] = self.get(key, False, False, None, False) - del self.memory_default, self.memory_normalized, self.memory_structural - self.memory_default = memory_new - self.memory_normalized = memory_new_norm - self.memory_structural = memory_new_structural + new_memory_normalized['torch'][key] = self.get(key, True, False, None, True) + new_memory_normalized['numpy'][key] = self.get(key, True, False, None, False) - def change_variables(self, increment, increment_structral=None): - ''' - Additional regression in the search process, run on the structural data, is required to set the - increment_structral tensor. ToDo! 
- ''' - raise DeprecationWarning('No need to change variables in current version of EPDE.') - assert not (increment_structral is None and not all(self.structural_and_base_merged)), 'Not all structural data taken from the default, and the increment for structural was not sent' - - random_key = list(self.memory_default.keys())[0] - increment = np.reshape(increment, newshape=self.memory_default[random_key].shape) - del self.memory_normalized - self.memory_default = {key: self.memory_default[key] for key in self.base_tensors} # deepcopy(self.base_tensors) - self.memory_structural = {key: self.memory_structural[key] for key in self.base_tensors} # deepcopy(self.base_tensors_structural) - self.memory_normalized = dict() - for key in self.memory_default.keys(): - assert np.all(self.memory_default[key].shape == increment.shape) - self.memory_default[key] = self.memory_default[key] - increment - if not self.structural_and_base_merged[key]: - self.memory_structural[key] = self.memory_structural[key] - increment_structral + new_memory_structural['torch'][key] = self.get(key, False, True, None, True) + new_memory_structural['numpy'][key] = self.get(key, False, True, None, False) + + del self.memory_default, self.memory_normalized, self.memory_structural + self.memory_default = new_memory_default + self.memory_normalized = new_memory_normalized + self.memory_structural = new_memory_structural def add(self, label, tensor, normalized: bool = False, structural: bool = False, - indication: bool = False): + deriv_code = None, indication: bool = False): ''' Method for addition of a new tensor into the cache. Returns True if there was enough memory and the tensor was saved, and False otherwise. ''' + # print(deriv_code) + if deriv_code is not None: + self._deriv_codes.append((deriv_code, label)) assert not (normalized and structural), 'The added matrix can not be simultaneously normalized and structural. Possibly, bug in token/term saving' + type_key = 'torch' if isinstance(tensor, torch.Tensor) else 'numpy' if normalized: if self.max_allowed_tensors is None: self.memory_usage_properties(obj_test_case=tensor, mem_for_cache_frac=5) - if (len(self.memory_normalized) + len(self.memory_default) + len(self.memory_structural) < self.max_allowed_tensors and - label not in self.memory_normalized.keys()): - self.memory_normalized[label] = tensor + if ((len(self.memory_normalized[type_key]) + len(self.memory_default[type_key]) + + len(self.memory_structural[type_key])) < self.max_allowed_tensors and + label not in self.memory_normalized[type_key].keys()): + self.memory_normalized[type_key][label] = tensor if indication: print('Enough space for saved normalized term ', label, tensor.nbytes) return True - elif label in self.memory_normalized.keys(): - assert np.all(np.isclose(self.memory_normalized[label], tensor)) + elif label in self.memory_normalized[type_key].keys(): if indication: print('The term already present in normalized cache, no addition required', label, tensor.nbytes) return True else: if indication: - print('Not enough space for term ', label, tensor.nbytes, 'Can save only', self.max_allowed_tensors, 'tensors. While already uploaded ', len(self.memory_normalized) + len(self.memory_default) + len(self.memory_structural)) + print('Not enough space for term ', label, tensor.nbytes, 'Can save only', self.max_allowed_tensors, + 'tensors. 
While already uploaded ', len(self.memory_normalized) + len(self.memory_default) + len(self.memory_structural)) return False elif structural: raise NotImplementedError('The structural data must be added with cache.use_structural method') else: if self.max_allowed_tensors is None: self.memory_usage_properties(obj_test_case=tensor, mem_for_cache_frac=5) - if (len(self.memory_normalized) + len(self.memory_default) + len(self.memory_structural) < self.max_allowed_tensors and - label not in self.memory_default.keys()): - self.memory_default[label] = tensor + if ((len(self.memory_normalized[type_key]) + len(self.memory_default[type_key]) + + len(self.memory_structural[type_key])) < self.max_allowed_tensors and + label not in self.memory_default[type_key].keys()): + self.memory_default[type_key][label] = tensor if indication: print('Enough space for saved unnormalized term ', label, tensor.nbytes) return True - elif label in self.memory_default.keys(): - assert np.all(np.isclose(self.memory_default[label], tensor)) + elif label in self.memory_default[type_key].keys(): if indication: print('The term already present in unnormalized cache, no addition required', label, tensor.nbytes) return True @@ -346,6 +360,7 @@ def add(self, label, tensor, normalized: bool = False, structural: bool = False, return False def delete_entry(self, entry_label): + print(f'Deleting {entry_label} from cache!') if entry_label not in self.memory_default['numpy'].keys(): raise ValueError('deleted element already not in memory') del self.memory_default['numpy'][entry_label] @@ -358,47 +373,49 @@ except KeyError: pass - def get(self, label, normalized=False, structural=False, saved_as=None): + def get(self, label, normalized=False, structural=False, saved_as=None, torch_mode: bool = False, deriv_code = None): assert not (normalized and structural), 'The added matrix can not be simultaneously normalized and scaled' -# assert not scaled or self.scale_used, 'Trying to add scaled data, while the cache it not allowed to get it' + type_key, other, other_bool = ('torch', 'numpy', False) if torch_mode else ('numpy', 'torch', True) + if deriv_code is not None: + label = [elem[1] for elem in self._deriv_codes if elem[0] == deriv_code][0] + if label is None: - print(self.memory_default.keys()) - return np.random.choice(list(self.memory_default.values())) + # print(self.memory_default[type_key].keys()) + return np.random.choice(list(self.memory_default[type_key].values())) if normalized: - try: - return self.memory_normalized[label] - except KeyError: - print('memory keys: ', self.memory_normalized.keys()) - print('fetched label:', label, ' prev. 
known as ', saved_as) - raise KeyError('Can not fetch tensor from cache with normalied data') + if label not in self.memory_normalized[type_key] and label in self.memory_normalized[other]: + self.memory_normalized[type_key][label] = switch_format(self.get(label, normalized, + structural, saved_as, other_bool), + device = self._device) + return self.memory_normalized[type_key][label] elif structural: - try: - return self.memory_default[label] if self.structural_and_base_merged[label] else self.memory_structural[label] - except KeyError: - print('structural', structural) - print('self.structural_and_base_merged', self.structural_and_base_merged.keys()) - print('self.memory_default', self.memory_default.keys()) - if self.structural_and_base_merged[label]: - print('memory keys (structural data taken from the default): ', self.memory_default.keys()) - else: - print('memory keys: ', self.memory_structural.keys()) - print('fetched label:', label, ' prev. known as ', saved_as) - raise KeyError('Can not fetch tensor from cache with normalied data') + if self.structural_and_base_merged[label]: + return self.get(label, normalized, False, saved_as, torch_mode) + else: + if label not in self.memory_structural[type_key] and label in self.memory_structural[other]: + self.memory_structural[type_key][label] = switch_format(self.get(label, normalized, + structural, saved_as, other_bool), + device = self._device) + # print('keys in mem_struct:', type_key, self.memory_structural[type_key].keys()) + return self.memory_structural[type_key][label] else: - try: - return self.memory_default[label] - except KeyError: - print('memory keys: ', self.memory_default.keys()) - print('fetched label:', label, ' prev. known as ', saved_as) - raise KeyError('Can not fetch tensor from cache with non-normalied data') + if label not in self.memory_default[type_key] and label in self.memory_default[other]: + self.memory_default[type_key][label] = switch_format(self.get(label, normalized, + structural, saved_as, other_bool), + device = self._device) + return self.memory_default[type_key][label] + - def get_all(self, normalized=False, structural=False): + def get_all(self, normalized=False, structural=False, mode: str = 'numpy'): + other = 'torch' if mode == 'numpy' else 'numpy' if normalized: - processed_mem = self.memory_normalized + processed_mem = self.memory_normalized[mode] + other_mem = self.memory_normalized[other] elif structural: - processed_mem = self.memory_structural + processed_mem = self.memory_structural[mode] + other_mem = self.memory_structural[other] else: - processed_mem = self.memory_default + processed_mem = self.memory_default[mode] + other_mem = self.memory_default[other] keys = [] tensors = [] @@ -406,6 +423,11 @@ keys.append(key) tensors.append(value) + for key, value in other_mem.items(): + if key not in processed_mem.keys(): + keys.append(key) + tensors.append(switch_format(value, device = self._device)) + return keys, tensors
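# Usage sketch (hypothetical labels and codes): derivative codes let get() resolve a cached
# tensor by its derivative signature instead of its label, in the ([var], [term]) format
# noted in __init__ above.
#
#   upload_simple_tokens(['u', 'du/dx0'], cache, [u, du_dx0],
#                        deriv_codes=[None, ([0], [0])])
#   cache.get(None, deriv_code=([0], [0]))   # fetches the tensor labeled ('du/dx0', (1.0,))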
def __contains__(self, obj): ''' (T if norm, else F) and np.ndarray is np.ndarray of tensor values. Does not support scaled vals ''' if (type(obj) == tuple or type(obj) == list) and type(obj[0]) == str: - return obj in self.memory_default.keys() + return (obj in self.memory_default['numpy'].keys()) or (obj in self.memory_default['torch'].keys()) elif (type(obj) == tuple or type(obj) == list) and type(obj[0]) == tuple and type(obj[1]) == bool: if obj[1]: - return obj[0] in self.memory_normalized.keys() + return (obj[0] in self.memory_normalized['numpy'].keys()) or (obj[0] in self.memory_normalized['torch'].keys()) else: - return obj[0] in self.memory_default.keys() + return (obj[0] in self.memory_default['numpy'].keys()) or (obj[0] in self.memory_default['torch'].keys()) elif type(obj) == np.ndarray: try: - return np.any([np.all(obj == entry_values) for entry_values in self.memory_default.values()]) + return np.any([np.all(obj == entry_values) for entry_values in self.memory_default['numpy'].values()]) except: return False + elif type(obj) == torch.Tensor: + try: + return np.any([np.all(obj == entry_values) for entry_values in self.memory_default['torch'].values()]) + except: + return False elif (type(obj) == tuple or type(obj) == list) and type(obj[0]) == np.ndarray and type(obj[1]) == bool: try: if obj[1]: - return np.any([np.all(obj[0] == entry_values) for entry_values in self.memory_normalized.values()]) + return np.any([np.all(obj[0] == entry_values) for entry_values in self.memory_normalized['numpy'].values()]) else: - return np.any([np.all(obj[0] == entry_values) for entry_values in self.memory_default.values()]) + return np.any([np.all(obj[0] == entry_values) for entry_values in self.memory_default['numpy'].values()]) except: return False + elif (type(obj) == tuple or type(obj) == list) and type(obj[0]) == torch.Tensor and type(obj[1]) == bool: + try: + if obj[1]: + return np.any([np.all(obj[0] == entry_values) for entry_values in self.memory_normalized['torch'].values()]) + else: + return np.any([np.all(obj[0] == entry_values) for entry_values in self.memory_default['torch'].values()]) + except: + return False else: raise NotImplementedError('Invalid format of function input to check, if the object is in cache') # def __iter__(self): # for key in self.memory_default.keys() - def prune_tensors(self, pruner, mem_to_process: list = ['default', 'structural', 'normalized']): + def prune_tensors(self, pruner, mem_to_process: list = ['default', 'structural', 'normalized'], + torch_mode: bool = False): + mode = 'torch' if torch_mode else 'numpy' mem_arranged = {'default': self.memory_default, - 'structural': self.memory_structural, - 'normalized': self.memory_normalized} + 'structural': self.memory_structural, + 'normalized': self.memory_normalized} - for key in self.memory_default.keys(): + for key in self.memory_default[mode].keys(): for mem_type in mem_to_process: try: - mem_arranged[mem_type][key] = pruner.prune(mem_arranged[mem_type][key]) + mem_arranged[mem_type][mode][key] = pruner.prune(mem_arranged[mem_type][mode][key]) except (NameError, KeyError) as e: pass @property def consumed_memory(self): - memsize = np.sum([value.nbytes for _, value in self.memory_default.items()]) - memsize += np.sum([value.nbytes for _, value in self.memory_normalized.items()]) + memsize = np.sum([value.nbytes for _, value in self.memory_default['numpy'].items()]) + memsize += np.sum([value.nbytes for _, value in self.memory_normalized['numpy'].items()]) for label, merged_state in self.structural_and_base_merged.items(): - if not merged_state: memsize += self.memory_structural[label].nbytes + if not 
merged_state: memsize += self.memory_structural['numpy'][label].nbytes return memsize def upload_complex_token(label: str, params_values: OrderedDict, evaluator, tensor_cache: Cache, grid_cache: Cache): - # label_completed = (label, (1.0,)) - # value = evaluator.apply(self) - # global_var.tensor_cache.add(self.cache_label, value, structural = False) try: evaluation_function = evaluator.evaluation_functions[label] except TypeError: @@ -477,3 +511,15 @@ label_completed = (label, tuple(params_values.values())) tensor_cache.add(label_completed, grid_function(indexes_vect)) + +# class EquationsCache(object): +# ''' +# Cache to keep the information about already discovered equations. Getting equation objective values will reduce the unnecessary +# computations, that may occur if the EPDE repeatedly generates the same equation. +# ''' +# def __init__(self): +# self._saved_equations = set() + +# @staticmethod +# def parse_input(self, equation): +# return \ No newline at end of file diff --git a/epde/cache/ctrl_cache.py b/epde/cache/ctrl_cache.py new file mode 100644 index 0000000..345c18b --- /dev/null +++ b/epde/cache/ctrl_cache.py @@ -0,0 +1,22 @@ +from typing import Union, List, Tuple + +import torch + +class ControlNNContainer(): + def __init__(self, output_num: int = 1, args: List[Tuple[Union[int, List]]] = [(0, [None,]),], + net: torch.nn.Sequential = None, device: str = 'cpu'): + self.net_args = args + self.net = net if isinstance(net, torch.nn.Sequential) else self.create_shallow_nn(len(self.net_args), + output_num, device) + + @staticmethod + def create_shallow_nn(arg_num: int = 1, output_num: int = 1, + device: str = 'cpu') -> torch.nn.Sequential: # net: torch.nn.Sequential = None, + hidden_neurons = 256 + layers = [torch.nn.Linear(arg_num, hidden_neurons, device=device), + torch.nn.ReLU(), + torch.nn.Linear(hidden_neurons, output_num, device=device)] + control_nn = torch.nn.Sequential(*layers) + control_nn.to(device) + print('control_nn', control_nn, next(control_nn.parameters()).device, 'should be ', device) + return control_nn \ No newline at end of file diff --git a/epde/control/__init__.py b/epde/control/__init__.py new file mode 100644 index 0000000..e1f0810 --- /dev/null +++ b/epde/control/__init__.py @@ -0,0 +1,2 @@ +from .constr import ConstrLocation, ConditionalLoss, ControlConstrEq, ControlConstrNEq +from .control import ControlExp \ No newline at end of file
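# Usage sketch for ControlNNContainer above, assuming torch is imported: net_args encodes
# which variables/derivatives feed the control network, and a shallow 256-neuron MLP is
# built when no net is supplied. The two-input args value here is a hypothetical setup.
#
#   from epde.cache.ctrl_cache import ControlNNContainer
#   container = ControlNNContainer(output_num=1,
#                                  args=[(0, [None,]), (0, [0,])])  # two inputs: u and du/dx0
#   ctrl_out = container.net(torch.randn(128, 2))  # -> tensor of shape (128, 1)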
diff --git a/epde/control/constr.py b/epde/control/constr.py new file mode 100644 index 0000000..f99e661 --- /dev/null +++ b/epde/control/constr.py @@ -0,0 +1,326 @@ +from typing import List, Union, Tuple, Callable +from abc import ABC, abstractmethod +from functools import singledispatchmethod + +import numpy as np +import torch + +from epde.supplementary import AutogradDeriv, FDDeriv +from epde.supplementary import BasicDeriv + +class ConstrLocation(): + def __init__(self, domain_shape: Tuple[int], axis: int = None, loc: int = None, + indices: List[np.ndarray] = None, device: str = 'cpu'): + ''' + Object containing the indices of the control training constraint location. + + Args: + domain_shape (`Tuple[int]`): shape of the domain, for which the control problem is solved. + + axis (`int`): axis, along which the boundary conditions are selected. Shall be introduced + only for constraints on the boundary. + Optional, the default value (`None`) matches the entire domain. + + loc (`int`): position along axis, where the "boundary" is located. Shall be introduced only for constraints on the boundary. + Optional, the default value (`None`) matches the entire domain. For example, -1 will correspond + to the end of the domain along axis. + + indices (`List[np.ndarray]`): explicit indices of the constraint location; if provided, overrides axis and loc. + Optional. + + device (`str`): string, matching the device, used for computation. Uses default torch designations. + Optional, defaults to `cpu` for CPU computations. + + ''' + self._device = device + self._initial_shape = domain_shape + + self.domain_indices = np.indices(domain_shape) + if indices is not None: + self.loc_indices = indices + elif axis is not None and loc is not None: + self.loc_indices = self.get_boundary_indices(self.domain_indices, axis, loc) + else: + self.loc_indices = self.domain_indices + self.flat_idxs = torch.from_numpy(np.ravel_multi_index(self.loc_indices, + dims = self._initial_shape)).long().to(self._device) + + + @staticmethod + def get_boundary_indices(domain_indices: np.ndarray, axis: int, + loc: Union[int, Tuple[int]]) -> np.ndarray: + ''' + Method of obtaining domain indices for a specified position, i.e. all 0-th elements along an axis, or the last + elements along a specific axis. + + Args: + domain_indices (`np.ndarray`): an array representing the indices of a grid. The subarrays contain index + values 0, 1, … varying only along the corresponding axis. For further details inspect the `np.indices(...)` + function. + + axis (`int`): index of the axis, along which the elements are taken, + + loc (`int` or `tuple` of `int`): positions along the specified axis, which are taken. Can be a tuple to + accommodate multiple elements along the axis. + + Returns: + `np.ndarray` of indices, where the conditions are estimated. + ''' + return np.stack([np.take(domain_indices[idx], indices = loc, axis = axis).reshape(-1) + for idx in np.arange(domain_indices.shape[0])]) + + def apply(self, tensor: torch.Tensor, flattened: bool = True, along_axis: int = None): + ''' + Get `tensor` values at the locations, specified by the object indexing. The resulting tensor will be flattened. + + Args: + tensor (`torch.Tensor`): the filtered tensor. + + flattened (`bool`): flag indicating whether the input tensor is flattened. Optional, default `True`; + `False` is not yet implemented. + + along_axis (`int`): axis, along which the filtering is performed. + ''' + if flattened: + shape = [1,] * tensor.ndim + shape[along_axis] = -1 + return torch.take_along_dim(input = tensor, indices = self.flat_idxs.view(*shape), dim = along_axis) + else: + raise NotImplementedError('Currently, apply can be applied only to flattened tensors.') + idxs = self.loc_indices # loop will be held over the first dimension + return tensor.take()
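# Usage sketch: selecting the t = 0 boundary of a hypothetical 2D (t, x) domain of shape (100, 50).
#
#   loc0 = ConstrLocation(domain_shape=(100, 50), axis=0, loc=0)
#   loc0.loc_indices.shape                   # (2, 50): index pairs of the 50 boundary nodes
#   field = torch.arange(100 * 50).float().reshape(-1, 1)
#   vals = loc0.apply(field, along_axis=0)   # values of the flattened field at t = 0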
class ControlConstraint(ABC): + ''' + Abstract class for declaring constraints in control optimization problems. + ''' + def __init__(self, val : Union[float, torch.Tensor], deriv_method: BasicDeriv, indices: ConstrLocation, + device: str = 'cpu', deriv_axes: List = [None,], nn_output: int = 0, **kwargs): + self._val = val + self._indices = indices + self._axes = deriv_axes + self._nn_output = nn_output + self._deriv_method = deriv_method + self._device = device + + @abstractmethod + def __call__(self, fun_nn: Union[torch.nn.Sequential, torch.Tensor], + arg_tensor: torch.Tensor) -> Tuple[bool, torch.Tensor]: + raise NotImplementedError('Trying to call abstract constraint discrepancy evaluation.') + + @abstractmethod + def loss(self, fun_nn: torch.nn.Sequential, arg_tensor: torch.Tensor) -> torch.Tensor: + raise NotImplementedError('Trying to call abstract constraint discrepancy evaluation.') + +class ControlConstrEq(ControlConstraint): + ''' + Class for equality constraints of the type $c(u^{(n)}) = f(u) - val = 0$. + ''' + def __init__(self, val : Union[float, torch.Tensor], deriv_method: BasicDeriv, # grid: torch.Tensor, + indices: ConstrLocation, device: str = 'cpu', deriv_axes: List = [None,], + nn_output: int = 0, tolerance: float = 1e-7, estim_func: Callable = None): + print(f'Initializing condition with {deriv_method} method of differentiation.') + super().__init__(val, deriv_method, indices, device, deriv_axes, nn_output) # grid, + self._eps = tolerance + self._estim_func = estim_func + + @singledispatchmethod + def __call__(self, function, arg_tensor) -> Tuple[bool, torch.Tensor]: + ''' + Calculate the fulfilment of the equality constraint and the discrepancy between + the observed and the desired constraint values. + ''' + raise NotImplementedError(f'Incorrect type of arguments passed into the call method. \ Got {type(function)} instead of np.ndarray or torch.nn.Sequential.') + + @__call__.register + def _(self, function: np.ndarray, arg_tensor): + if isinstance(self._deriv_method, AutogradDeriv): + raise RuntimeError('Trying to call autograd differentiation of a numpy ndarray. 
Use FDDeriv instead.') + + to_compare = self._deriv_method.take_derivative(u = function, + args = self._indices.apply(arg_tensor, along_axis=0), # correct along_axis argument + axes = self._axes) + to_compare = torch.from_numpy(to_compare) + + if not isinstance(self._val, torch.Tensor): + val_transformed = torch.full_like(input = to_compare, + fill_value=self._val).to(self._device) + else: + if to_compare.shape != self._val.shape: + try: + to_compare = to_compare.view(self._val.size()) + except: + raise TypeError(f'Incorrect shapes of constraint value tensor: expected {self._val.shape}, got {to_compare.shape}.') + val_transformed = self._val + if self._estim_func is not None: + constr_enf = self._estim_func(to_compare, val_transformed) + else: + constr_enf = val_transformed - to_compare + + return (torch.isclose(constr_enf, torch.zeros_like(constr_enf).to(self._device), rtol = self._eps), + constr_enf) # val_transformed - to_compare + + @__call__.register + def _(self, function: torch.nn.Sequential, arg_tensor): + if isinstance(self._deriv_method, FDDeriv): + raise RuntimeError('Trying to call finite differences to get derivatives of ANN, while ANN eval is not supported.\ Use Autograd instead.') + to_compare = self._deriv_method.take_derivative(u = function, + args = self._indices.apply(arg_tensor, along_axis=0), # correct along_axis argument + axes = self._axes) + + + if not isinstance(self._val, torch.Tensor): + val_transformed = torch.full_like(input = to_compare, fill_value=self._val).to(self._device) + else: + if to_compare.shape != self._val.shape: + try: + to_compare = to_compare.view(self._val.size()) + except: + raise TypeError(f'Incorrect shapes of constraint value tensor: expected {self._val.shape}, got {to_compare.shape}.') + val_transformed = self._val + if self._estim_func is not None: + constr_enf = self._estim_func(to_compare, val_transformed) + else: + constr_enf = val_transformed - to_compare + + return (torch.isclose(constr_enf, torch.zeros_like(constr_enf).to(self._device), rtol = self._eps), + constr_enf) # val_transformed - to_compare + + def loss(self, function: Union[torch.nn.Sequential, np.ndarray], arg_tensor: torch.Tensor) -> torch.Tensor: + ''' + Return the value of the loss function term, created by the condition. + + Args: + function (`torch.nn.Sequential` or `np.ndarray`): artificial neural network approximating the function used in the condition, or its precomputed values. + + arg_tensor (`torch.Tensor`): tensor, used as the argument of the network, passed as `function`. + + Returns: + `torch.Tensor` with the norm of the constraint discrepancy to be used in the combined loss. 
+ ''' + _, discrepancy = self(function, arg_tensor) + return torch.norm(discrepancy) + + +class ControlConstrNEq(ControlConstraint): + ''' + Class for inequality constraints of the type $c(u, x) = f(u, x) - val \gtrless 0$, with the comparison sign ('>' or '<') stored in `self._sign`. + ''' + def __init__(self, val : Union[float, torch.Tensor], deriv_method: BasicDeriv, # grid: torch.Tensor, + indices: ConstrLocation, device: str = 'cpu', sign: str = '>', deriv_axes: List = [None,], + nn_output: int = 0, tolerance: float = 1e-7, estim_func: Callable = None): + print(f'Initializing condition with {deriv_method} method of differentiation.') + super().__init__(val, deriv_method, indices, device, deriv_axes, nn_output) # grid, + self._sign = sign + self._estim_func = estim_func + + @singledispatchmethod + def __call__(self, function, arg_tensor) -> Tuple[bool, torch.Tensor]: + ''' + Calculate the fulfilment of the inequality constraint and the discrepancy between + the observed and the desired constraint values. + ''' + raise NotImplementedError(f'Incorrect type of arguments passed into the call method. \ Got {type(function)} instead of np.ndarray or torch.nn.Sequential.') + + @__call__.register + def _(self, function: np.ndarray, arg_tensor) -> Tuple[bool, torch.Tensor]: + if isinstance(self._deriv_method, AutogradDeriv): + raise RuntimeError('Trying to call autograd differentiation of a numpy ndarray. Use FDDeriv instead.') + + to_compare = self._deriv_method.take_derivative(u = function, + args=self._indices.apply(arg_tensor, along_axis = 0), # correct along_axis argument + axes=self._axes, component = self._nn_output) + + to_compare = torch.from_numpy(to_compare) + + if not isinstance(self._val, torch.Tensor): + val_transformed = torch.full_like(input = to_compare, fill_value=self._val) + else: + if not to_compare.shape == self._val.shape: + to_compare = torch.reshape(to_compare, shape=self._val.shape) + val_transformed = self._val + + if self._estim_func is not None: + constr_enf = self._estim_func(val_transformed, to_compare) + else: + constr_enf = val_transformed - to_compare + + if self._sign == '>': + return torch.greater(constr_enf, torch.zeros_like(constr_enf).to(self._device)), torch.nn.functional.relu(constr_enf) + elif self._sign == '<': + return (torch.less(constr_enf, torch.zeros_like(constr_enf).to(self._device)), + torch.nn.functional.relu(constr_enf)) + + @__call__.register + def _(self, function: torch.nn.Sequential, arg_tensor) -> Tuple[bool, torch.Tensor]: + if isinstance(self._deriv_method, FDDeriv): + raise RuntimeError('Trying to call finite differences to get derivatives of ANN, while ANN eval is not supported.\ Use Autograd instead.') + to_compare = self._deriv_method.take_derivative(u = function, + args=self._indices.apply(arg_tensor, along_axis = 0), # correct along_axis argument + axes=self._axes, component = self._nn_output) + + if not isinstance(self._val, torch.Tensor): + val_transformed = torch.full_like(input = to_compare, fill_value=self._val) + else: + if not to_compare.shape == self._val.shape: + to_compare = torch.reshape(to_compare, shape=self._val.shape) + val_transformed = self._val + + if self._estim_func is not None: + constr_enf = self._estim_func(val_transformed, to_compare) + else: + constr_enf = val_transformed - to_compare + + if self._sign == '>': + return torch.greater(constr_enf, torch.zeros_like(constr_enf).to(self._device)), torch.nn.functional.relu(constr_enf) + elif self._sign == '<': + return (torch.less(constr_enf, torch.zeros_like(constr_enf).to(self._device)), + torch.nn.functional.relu(constr_enf))
#torch.less(val_transformed, to_compare), torch.nn.functional.relu(to_compare - val_transformed) + + def loss(self, function: Union[torch.nn.Sequential, np.ndarray], arg_tensor: torch.Tensor) -> torch.Tensor: + ''' + Return the value of the loss function term, created by the condition. + + Args: + function (`torch.nn.Sequential` or `np.ndarray`): artificial neural network approximating the function used in the condition, or its precomputed values. + + arg_tensor (`torch.Tensor`): tensor, used as the argument of the network, passed as `function`. + + Returns: + `torch.Tensor` with the norm of the constraint discrepancy to be used in the combined loss. + ''' + _, discrepancy = self(function, arg_tensor) + return torch.norm(discrepancy) + + +class ConditionalLoss(): + ''' + Class for the loss used in the control function optimization procedure. Contains the terms of the loss + function in the `self._cond` attribute. + ''' + def __init__(self, conditions: List[Tuple[Union[float, ControlConstraint, int]]]): + ''' + Initialize the conditional loss with the terms that evaluate the applied control and the + quality of the equation solution under the current control. + + Args: + conditions (`list` of triplet `tuple` as (`float`, `ControlConstraint`, `int`)): weight, constraint and model index of each term. + ''' + self._cond = conditions + + def __call__(self, models: List[torch.nn.Sequential], args: list): # Introduce prepare control input: get torch tensors from solver & autodiff them + ''' + Return the summed values of the loss function components. + ''' + temp = [] + for cond in self._cond: + temp.append(cond[0] * cond[1].loss(models[cond[2]], args[cond[2]])) + + return torch.stack(temp, dim=0).sum(dim=0).sum(dim=0)
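# Usage sketch, assuming the classes above and that AutogradDeriv takes no constructor
# arguments: each loss term is a (weight, constraint, model index) triplet; the index selects
# which model/argument pair from the __call__ inputs the constraint is evaluated on.
#
#   everywhere = ConstrLocation(domain_shape=(100,), device='cpu')
#   track_u = ControlConstrEq(val=1., deriv_method=AutogradDeriv(),
#                             indices=everywhere, deriv_axes=[None,])
#   loss_fn = ConditionalLoss([(1.0, track_u, 0)])
#   # loss_value = loss_fn([state_net, control_net], [state_args, control_args])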
diff --git a/epde/control/control.py b/epde/control/control.py new file mode 100644 index 0000000..08fb2f8 --- /dev/null +++ b/epde/control/control.py @@ -0,0 +1,350 @@ +import numpy as np +import torch +import pickle + +import matplotlib.pyplot as plt +import datetime +import os +from warnings import warn + +import gc +from typing import List, Union +from copy import deepcopy + + +import epde.globals as global_var +from epde.interface.interface import ExperimentCombiner +from epde.optimizers.moeadd.moeadd import ParetoLevels +from epde.integrate import SolverAdapter, OdeintAdapter, BOPElement + +from epde.control.constr import ConditionalLoss +from epde.control.utils import prepare_control_inputs, eps_increment_diff +from epde.control.optim import AdamOptimizer, CoordDescentOptimizer + +from epde.supplementary import FDDeriv, AutogradDeriv + +class ControlExp(): + def __init__(self, loss : ConditionalLoss, device: str = 'cpu'): + self._device = device + self._state_net = None + self._best_control_net = None + self.loss = loss + + def create_best_equations(self, optimal_equations: Union[list, ParetoLevels]): + res_combiner = ExperimentCombiner(optimal_equations) + return res_combiner.create_best(self._pool) + + @staticmethod + def create_ode_bop(key, var, term, grid_loc, value, device: str = 'cpu'): + bop = BOPElement(axis = 0, key = key, term = term, power = 1, var = var, device = device) + bop_grd_np = np.array([[grid_loc,]]) + bop.set_grid(torch.from_numpy(bop_grd_np).type(torch.FloatTensor).to(device)) + bop.values = torch.from_numpy(np.array([[value,]])).float().to(device) + return bop + + def set_solver_params(self, use_pinn: bool = True, mode: str = 'autograd', compiling_params: dict = {}, + optimizer_params: dict = {}, cache_params: dict = {}, early_stopping_params: dict = {}, + plotting_params: dict = {}, training_params: dict = {'epochs': 150,}, + use_cache: bool = False, use_fourier: bool = False, # 5*1e0 + fourier_params: dict = None, use_adaptive_lambdas: bool = False): # device: str = 'cpu' + self._use_pinn = use_pinn + self._solver_params = {'mode': mode, + 'compiling_params': compiling_params, + 'optimizer_params': optimizer_params, + 'cache_params': cache_params, + 'early_stopping_params': early_stopping_params, + 'plotting_params': plotting_params, + 'training_params': training_params, + 'use_cache': use_cache, + 'use_fourier': use_fourier, + 'fourier_params': fourier_params, + 'use_adaptive_lambdas': use_adaptive_lambdas, + 'device': torch.device(self._device) + } + + def get_solver_adapter(self, net: torch.nn.Sequential = None): + if self._use_pinn: + adapter = SolverAdapter(net = net, use_cache = False, device = self._device) + # Edit solver forms of functions of dependent variable to Callable objects. + # Setting various adapter parameters + adapter.set_compiling_params(**self._solver_params['compiling_params']) + adapter.set_optimizer_params(**self._solver_params['optimizer_params']) + adapter.set_cache_params(**self._solver_params['cache_params']) + adapter.set_early_stopping_params(**self._solver_params['early_stopping_params']) + adapter.set_plotting_params(**self._solver_params['plotting_params']) + adapter.set_training_params(**self._solver_params['training_params']) + adapter.change_parameter('mode', self._solver_params['mode'], param_dict_key = 'compiling_params') + else: + try: + self._solver_params['method'] + except KeyError: + self._solver_params['method'] = 'Radau' + adapter = OdeintAdapter(method = self._solver_params['method']) + return adapter + + @staticmethod + def finite_diff_calculation(system, adapter, loc, control_loss, state_net: torch.nn.Sequential, + bc_operators, grids: list, solver_params: dict, eps: float): + ''' + Calculate a finite-difference approximation of the gradient with respect to the specified parameter. 
+ ''' + # Calculating loss in p[i]+eps: + ctrl_dict_prev = global_var.control_nn.net.state_dict() + ctrl_nn_dict = eps_increment_diff(input_params=ctrl_dict_prev, + loc = loc, forward=True, eps=eps) + global_var.control_nn.net.load_state_dict(ctrl_nn_dict) + + if isinstance(adapter, SolverAdapter): + adapter.set_net(deepcopy(state_net)) + diff_method = AutogradDeriv + else: + diff_method = FDDeriv + + solver_loss_forward, model = adapter.solve_epde_system(system = system, grids = grids[0], data = None, + boundary_conditions = bc_operators, + mode = solver_params['mode'], + use_cache = solver_params['use_cache'], + use_fourier = solver_params['use_fourier'], + fourier_params = solver_params['fourier_params'], + use_adaptive_lambdas = solver_params['use_adaptive_lambdas']) + + control_inputs = prepare_control_inputs(model, grids[1], global_var.control_nn.net_args, + diff_method = diff_method) + loss_forward = control_loss([model, global_var.control_nn.net], [grids[1], control_inputs]) + + # Calculating loss in p[i]-eps: + ctrl_dict_prev = global_var.control_nn.net.state_dict() # deepcopy() + ctrl_nn_dict = eps_increment_diff(input_params=ctrl_dict_prev, + loc = loc, forward=False, eps=eps) + global_var.control_nn.net.load_state_dict(ctrl_nn_dict) + + if isinstance(adapter, SolverAdapter): + adapter.set_net(deepcopy(state_net)) + solver_loss_backward, model = adapter.solve_epde_system(system = system, grids = grids[0], data = None, + boundary_conditions = bc_operators, + mode = solver_params['mode'], + use_cache = solver_params['use_cache'], + use_fourier = solver_params['use_fourier'], + fourier_params = solver_params['fourier_params'], + use_adaptive_lambdas = solver_params['use_adaptive_lambdas']) + + control_inputs = prepare_control_inputs(model, grids[1], global_var.control_nn.net_args, + diff_method = diff_method) + loss_back = control_loss([model, global_var.control_nn.net], [grids[1], control_inputs]) + + # Restore values of the control NN parameters + ctrl_nn_dict = global_var.control_nn.net.state_dict() + ctrl_nn_dict = eps_increment_diff(input_params=ctrl_nn_dict, + loc = loc, forward=True, eps=eps) + global_var.control_nn.net.load_state_dict(ctrl_nn_dict) + + loss_max = 1e-3 + if solver_loss_backward > loss_max or solver_loss_forward > loss_max: + warn(f'High solver loss occurred: backward {solver_loss_backward} and forward {solver_loss_forward}.') + loss_alpha = 1e1 + + with torch.no_grad(): + delta = loss_forward - loss_back + if torch.abs(delta) < solver_loss_forward+solver_loss_backward: + res = 0*delta + else: + res = delta/(2*eps*(1+loss_alpha*(solver_loss_forward+solver_loss_backward))) + # print(f'loss_forward {loss_forward, solver_loss_forward}, loss_backward {loss_back, solver_loss_backward}, res {res}') + # print(f'loss_alpha*(solver_loss_forward+solver_loss_backward) {loss_alpha*(solver_loss_forward+solver_loss_backward)}') + return res
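# In effect, finite_diff_calculation returns a damped central difference; assuming
# eps_increment_diff shifts the single parameter at loc by +/- eps, the estimate is
#
#   grad = (L(p + eps) - L(p - eps)) / (2 * eps * (1 + alpha * (s_fwd + s_back)))
#
# where s_fwd and s_back are the solver (PINN) losses of the two auxiliary solves and
# alpha = 1e1; the gradient is zeroed whenever |L(p + eps) - L(p - eps)| < s_fwd + s_back,
# i.e. when the difference is dominated by solver error.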
+ + def feedback(self, bc_operators: List[Union[dict, float]], grids: List[Union[np.ndarray, torch.Tensor]], + n_control: int = 1, epochs: int = 1e2, state_net: torch.nn.Sequential = None, + opt_params: List[float] = [0.01, 0.9, 0.999, 1e-8], + control_net: torch.nn.Sequential = None, fig_folder: str = None, + LV_exp: bool = True, eps: float = 1e-2, solver_params: dict = {}): + def modify_bc(operator: dict, scale: Union[float, torch.Tensor]) -> dict: + noised_operator = deepcopy(operator) + noised_operator['bnd_val'] = torch.normal(operator['bnd_val'], scale).to(self._device) + return noised_operator + + # Properly formulate the training approach + t = 0 + + loss_hist = [] + stop_training = False + + time = datetime.datetime.now() + + if isinstance(state_net, torch.nn.Sequential): self._state_net = state_net + global_var.reset_control_nn(n_control = n_control, ann = control_net, + ctrl_args = global_var.control_nn.net_args, device = self._device) + + # TODO Refactor hook: optimizing the net stored in global variables is a questionable approach, rethink it + + if isinstance(grids[0], np.ndarray): + grids_merged = torch.from_numpy(np.array([subgrid.reshape(-1) for subgrid in grids])).float().T.to(self._device) + elif isinstance(grids[0], torch.Tensor): + grids_merged = torch.cat([subgrid.reshape(-1, 1) for subgrid in grids], dim = 1).float() + grids_merged = grids_merged.to(device=self._device) + + grad_tensors = deepcopy(global_var.control_nn.net.state_dict()) + + min_loss = np.inf + self._best_control_params = global_var.control_nn.net.state_dict() + + # optimizer = AdamOptimizer(optimized = global_var.control_nn.net.state_dict(), parameters = opt_params) + optimizer = CoordDescentOptimizer(optimized = global_var.control_nn.net.state_dict(), parameters = opt_params) + + self.set_solver_params(**solver_params['full']) + adapter = self.get_solver_adapter(None) + if isinstance(adapter, SolverAdapter): + adapter.set_net(deepcopy(self._state_net)) + diff_method = AutogradDeriv + else: + diff_method = FDDeriv + + sampled_bc = [modify_bc(operator, noise_std) for operator, noise_std in bc_operators] + loss_pinn, model = adapter.solve_epde_system(system = self.system, grids = grids, data = None, + boundary_conditions = sampled_bc, + mode = self._solver_params['mode'], + use_cache = self._solver_params['use_cache'], + use_fourier = self._solver_params['use_fourier'], + fourier_params = self._solver_params['fourier_params'], + use_adaptive_lambdas = self._solver_params['use_adaptive_lambdas']) + + + print(f'Model is {type(model)}, while loss requires {[(cond, cond[1]._deriv_method) for cond in self.loss._cond]}') + control_inputs = prepare_control_inputs(model, grids_merged, global_var.control_nn.net_args, + diff_method = diff_method) + loss = self.loss([model, global_var.control_nn.net], [grids_merged, control_inputs]) + print('current loss is ', loss, 'model undertrained with loss of ', loss_pinn) + + while t < epochs and not stop_training: + self.set_solver_params(**solver_params['abridged']) + adapter = self.get_solver_adapter(None) + sampled_bc = [modify_bc(operator, noise_std) for operator, noise_std in bc_operators] + + # self.set_solver_params(**solver_params['full']) + adapter = self.get_solver_adapter(None) + + if isinstance(adapter, SolverAdapter): + adapter.set_net(self._state_net) + + loss_pinn, model = adapter.solve_epde_system(system = self.system, grids = grids, data = None, + boundary_conditions = sampled_bc, + mode = self._solver_params['mode'], + use_cache = self._solver_params['use_cache'], + use_fourier = self._solver_params['use_fourier'], + fourier_params = self._solver_params['fourier_params'], + use_adaptive_lambdas = self._solver_params['use_adaptive_lambdas']) + + control_inputs = prepare_control_inputs(model, grids_merged, global_var.control_nn.net_args, + diff_method = diff_method) + loss = self.loss([model, global_var.control_nn.net], [grids_merged, control_inputs]) + self._state_net = model + self.set_solver_params(**solver_params['abridged']) + + global_var.control_nn.net.load_state_dict(self._best_control_params) + state_net = deepcopy(self._state_net) + print(f'Control function optimization epoch {t}.') + for param_key, 
param_tensor in grad_tensors.items(): + print(f'Optimizing {param_key}: shape is {param_tensor.shape}') + if len(param_tensor.size()) == 1: + for param_idx, _ in enumerate(param_tensor): + loc = (param_key, param_idx) + grad_tensors[loc[0]] = grad_tensors[loc[0]].detach() + grad_tensors[loc[0]][loc[1:]] = self.finite_diff_calculation(system = self.system, + adapter = adapter, + loc = loc, control_loss = self.loss, + state_net = state_net, + bc_operators = sampled_bc, + grids = [grids, grids_merged], + solver_params = self._solver_params, + eps = eps) + if optimizer.behavior == 'Coordinate': + state_dict_prev = global_var.control_nn.net.state_dict() + state_dict = optimizer.step(gradient = grad_tensors, optimized = state_dict_prev, loc = loc) + global_var.control_nn.net.load_state_dict(state_dict) + elif len(param_tensor.size()) == 2: + for param_outer_idx, _ in enumerate(param_tensor): + for param_inner_idx, _ in enumerate(param_tensor[0]): + loc = (param_key, param_outer_idx, param_inner_idx) + grad_tensors[loc[0]] = grad_tensors[loc[0]].detach() + grad_tensors[loc[0]][tuple(loc[1:])] = self.finite_diff_calculation(system = self.system, + adapter = adapter, + loc = loc, + control_loss = self.loss, + state_net = state_net, + bc_operators = sampled_bc, + grids = [grids, grids_merged], + solver_params = self._solver_params, + eps = eps) + if optimizer.behavior == 'Coordinate': + state_dict_prev = global_var.control_nn.net.state_dict() + state_dict = optimizer.step(gradient = grad_tensors, optimized = state_dict_prev, loc = loc) + global_var.control_nn.net.load_state_dict(state_dict) + else: + raise Exception(f'Incorrect shape of weights/bias. Got {param_tensor.size()} tensor.') + if optimizer.behavior == 'Gradient': + state_dict_prev = global_var.control_nn.net.state_dict() + state_dict = optimizer.step(gradient = grad_tensors, optimized = state_dict_prev) + global_var.control_nn.net.load_state_dict(state_dict) + del state_dict, state_dict_prev + + self.set_solver_params(**solver_params['full']) + adapter = self.get_solver_adapter(None) + if isinstance(adapter, SolverAdapter): + adapter.set_net(self._state_net) + loss_pinn, model = adapter.solve_epde_system(system = self.system, grids = grids, data = None, + boundary_conditions = sampled_bc, + mode = self._solver_params['mode'], + use_cache = self._solver_params['use_cache'], + use_fourier = self._solver_params['use_fourier'], + fourier_params = self._solver_params['fourier_params'], + use_adaptive_lambdas = self._solver_params['use_adaptive_lambdas']) + + # var_prediction = model(grids_merged) + self._state_net = model + + control_inputs = prepare_control_inputs(model, grids_merged, global_var.control_nn.net_args, + diff_method = diff_method) + loss = self.loss([model, global_var.control_nn.net], [grids_merged, control_inputs]) + print('current loss is ', loss, 'model undertrained with loss of ', loss_pinn) + + self._best_control_params = global_var.control_nn.net.state_dict() + loss_hist.append(loss) + + if fig_folder is not None and LV_exp: + plt.figure(figsize=(11, 6)) + plt.plot(grids_merged.cpu().detach().numpy(), control_inputs.cpu().detach().numpy()[:, 0], color = 'k') + plt.plot(grids_merged.cpu().detach().numpy(), control_inputs.cpu().detach().numpy()[:, 1], color = 'r') + plt.plot(grids_merged.cpu().detach().numpy(), global_var.control_nn.net(control_inputs).cpu().detach().numpy(), + color = 'tab:orange') + plt.grid() + frame_name = f'Exp_{time.month}_{time.day}_at_{time.hour}_{time.minute}_{t}.png' + 
plt.savefig(os.path.join(fig_folder, frame_name)) + + if fig_folder is not None: + exp_res = {'state' : control_inputs.cpu().detach().numpy(), + 'control' : global_var.control_nn.net(control_inputs).cpu().detach().numpy()} + frame_name = f'Exp_{time.month}_{time.day}_at_{time.hour}_{time.minute}_{t}.pickle' + with open(os.path.join(fig_folder, frame_name), 'wb') as ctrl_output_file: + pickle.dump(exp_res, file = ctrl_output_file) + + gc.collect() + t += 1 + + control_inputs = prepare_control_inputs(model, grids_merged, global_var.control_nn.net_args, + diff_method = diff_method) + ctrl_pred = global_var.control_nn.net(control_inputs) + + return self._state_net, global_var.control_nn.net, ctrl_pred, loss_hist + + def time_based(self, bc_operators: List[Union[dict, float]], grids: List[Union[np.ndarray, torch.Tensor]], + n_control: int = 1, epochs: int = 1e2, state_net: torch.nn.Sequential = None, + opt_params: List[float] = [0.01, 0.9, 0.999, 1e-8], + control_net: torch.nn.Sequential = None, fig_folder: str = None, + LV_exp: bool = True, eps: float = 1e-2, solver_params: dict = {}): + self.set_solver_params(**solver_params['full']) + adapter = self.get_solver_adapter(None) + solver_form = self.system + + raise NotImplementedError() diff --git a/epde/control/optim.py b/epde/control/optim.py new file mode 100644 index 0000000..17600bd --- /dev/null +++ b/epde/control/optim.py @@ -0,0 +1,128 @@ +from abc import ABC +from collections import OrderedDict +from typing import List, Dict + +from warnings import warn + +import numpy as np +import torch + +class FirstOrderOptimizerNp(ABC): + behavior = 'None' + def __init__(self, parameters: np.ndarray, optimized: np.ndarray): + raise NotImplementedError('Calling __init__ of an abstract optimizer') + + def step(self, gradient: np.ndarray): + raise NotImplementedError('Calling step of an abstract optimizer') + +class AdamOptimizerNp(FirstOrderOptimizerNp): + behavior = 'Gradient' + def __init__(self, optimized: np.ndarray, parameters: np.ndarray = np.array([0.001, 0.9, 0.999, 1e-8])): + ''' + parameters[0] - alpha, parameters[1] - beta_1, parameters[2] - beta_2, parameters[3] - eps + ''' + self.reset(optimized, parameters) + + def reset(self, optimized: np.ndarray, parameters: np.ndarray): + self._moment = np.zeros_like(optimized) + self._second_moment = np.zeros_like(optimized) + self._second_moment_max = np.zeros_like(optimized) + self.parameters = parameters + self.time = 0 + + def step(self, gradient: np.ndarray, optimized: np.ndarray): + self.time += 1 + self._moment = self.parameters[1] * self._moment + (1-self.parameters[1]) * gradient + self._second_moment = self.parameters[2] * self._second_moment +\ + (1-self.parameters[2]) * np.power(gradient, 2) + moment_cor = self._moment/(1 - np.power(self.parameters[1], self.time)) + second_moment_cor = self._second_moment/(1 - np.power(self.parameters[2], self.time)) + return optimized - self.parameters[0]*moment_cor/(np.sqrt(second_moment_cor)+self.parameters[3])
+class FirstOrderOptimizer(ABC):
+    behavior = 'Gradient'
+    def __init__(self, optimized: List[torch.Tensor], parameters: list):
+        raise NotImplementedError('Calling __init__ of an abstract optimizer')
+
+    def reset(self, optimized: Dict[str, torch.Tensor], parameters: np.ndarray):
+        raise NotImplementedError('Calling reset method of an abstract optimizer')
+
+    def step(self, gradient: Dict[str, torch.Tensor], optimized: Dict[str, torch.Tensor],
+             *args, **kwargs) -> Dict[str, torch.Tensor]:
+        raise NotImplementedError('Calling step of an abstract optimizer')
+
+class AdamOptimizer(FirstOrderOptimizer):
+    behavior = 'Gradient'
+    def __init__(self, optimized: List[torch.Tensor], parameters: list = [0.001, 0.9, 0.999, 1e-8]):
+        '''
+        parameters[0] - alpha, parameters[1] - beta_1, parameters[2] - beta_2,
+        parameters[3] - eps
+        '''
+        self.reset(optimized, parameters)
+
+    def reset(self, optimized: Dict[str, torch.Tensor], parameters: np.ndarray):
+        self._moment = [torch.zeros_like(param_subtensor) for param_subtensor in optimized.values()]
+        self._second_moment = [torch.zeros_like(param_subtensor) for param_subtensor in optimized.values()]
+        self.parameters = parameters
+        self.time = 0
+
+    def step(self, gradient: Dict[str, torch.Tensor], optimized: Dict[str, torch.Tensor],
+             *args, **kwargs) -> Dict[str, torch.Tensor]:
+        self.time += 1
+
+        self._moment = [self.parameters[1] * self._moment[tensor_idx] + (1-self.parameters[1]) * grad_subtensor
+                        for tensor_idx, grad_subtensor in enumerate(gradient.values())]
+
+        self._second_moment = [self.parameters[2]*self._second_moment[tensor_idx] +
+                               (1-self.parameters[2])*torch.pow(grad_subtensor, 2)
+                               for tensor_idx, grad_subtensor in enumerate(gradient.values())]
+
+        moment_cor = [moment_tensor/(1 - self.parameters[1] ** self.time) for moment_tensor in self._moment]
+        second_moment_cor = [sm_tensor/(1 - self.parameters[2] ** self.time) for sm_tensor in self._second_moment]
+        return OrderedDict([(subtensor_key, optimized[subtensor_key] - self.parameters[0] * moment_cor[tensor_idx]/\
+                             (torch.sqrt(second_moment_cor[tensor_idx]) + self.parameters[3]))
+                            for tensor_idx, subtensor_key in enumerate(optimized.keys())])
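# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the patch. AdamOptimizer
# operates on state_dict-style OrderedDicts, mirroring how the control training
# loop above feeds gradient/state dictionaries; import path assumed as above.
import torch
from collections import OrderedDict
from epde.control.optim import AdamOptimizer

net = torch.nn.Sequential(torch.nn.Linear(2, 1))
params = OrderedDict((key, tensor.detach().clone()) for key, tensor in net.state_dict().items())
opt = AdamOptimizer(optimized=params)

grads = OrderedDict((key, torch.ones_like(tensor)) for key, tensor in params.items())  # stand-in gradients
params = opt.step(gradient=grads, optimized=params)
net.load_state_dict(params)  # push the updated weights back into the network
# ---------------------------------------------------------------------------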
+class CoordDescentOptimizer(FirstOrderOptimizer):
+    behavior = 'Coordinate'
+    def __init__(self, optimized: List[torch.Tensor], parameters: list = [0.001,]):
+        '''
+        parameters[0] - alpha (learning rate); coordinate descent uses no further
+        hyperparameters, unlike Adam
+        '''
+        self.reset(optimized, parameters)
+
+    def reset(self, optimized: Dict[str, torch.Tensor], parameters: np.ndarray):
+        self.parameters = parameters
+        self.time = 0
+
+    def step(self, gradient: Dict[str, torch.Tensor], optimized: Dict[str, torch.Tensor],
+             *args, **kwargs) -> Dict[str, torch.Tensor]:
+        self.time += 1
+        assert 'loc' in kwargs.keys(), 'Missing location of parameter value shift in coordinate descent.'
+        loc = kwargs['loc']
+        if torch.isclose(gradient[loc[0]][tuple(loc[1:])],
+                         torch.tensor((0,)).to(device=gradient[loc[0]][tuple(loc[1:])].device).float()):
+            warn(f'Gradient at {loc} is close to zero: {gradient[loc[0]][tuple(loc[1:])]}.')
+        optimized[loc[0]][tuple(loc[1:])] = optimized[loc[0]][tuple(loc[1:])] -\
+            self.parameters[0]*gradient[loc[0]][tuple(loc[1:])]
+        return optimized
+
+# class LBFGS(FirstOrderOptimizer):
+#     def __init__(self, optimized: List[torch.Tensor], parameters: list = []):
+#         pass
+
+#     def step(self, gradient: Dict[str, torch.Tensor], optimized: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
+#         pass
+
+#     def update_hessian(self, gradient: Dict[str, torch.Tensor], x_vals: Dict[str, torch.Tensor]):
+#         # Use self._prev_grad
+#         for i in range(self._mem_size - 1):
+#             alpha =
+
+#     def get_alpha(self):
+#         return alpha
\ No newline at end of file
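# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the patch. One coordinate of a
# state_dict-like OrderedDict is shifted against its gradient; loc = (key,
# indices...), matching how the finite-difference loop addresses single weights.
import torch
from collections import OrderedDict
from epde.control.optim import CoordDescentOptimizer

params = OrderedDict([('linear.weight', torch.zeros(2, 3))])
grads = OrderedDict([('linear.weight', torch.ones(2, 3))])

opt = CoordDescentOptimizer(optimized=params, parameters=[0.1])
params = opt.step(gradient=grads, optimized=params, loc=('linear.weight', 0, 1))
# params['linear.weight'][0, 1] is now -0.1; all other entries are untouched
# ---------------------------------------------------------------------------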
diff --git a/epde/control/utils.py b/epde/control/utils.py
new file mode 100644
index 0000000..a03170e
--- /dev/null
+++ b/epde/control/utils.py
@@ -0,0 +1,58 @@
+from typing import List, Tuple, Union
+from collections import OrderedDict
+
+import numpy as np
+import torch
+
+from epde.supplementary import BasicDeriv, AutogradDeriv
+
+def prepare_control_inputs(model: Union[torch.nn.Sequential, List[np.ndarray]], grid: torch.Tensor,
+                           args: List[Tuple[Union[int, List]]], diff_method: BasicDeriv = None) -> torch.Tensor:
+    '''
+    Recompute the control ANN input tensor from the solutions of the
+    controlled equations $L \mathbf{u}(t, \mathbf{x}, \mathbf{c}) = 0$,
+    calculating the derivatives specified in `args`.
+
+    Args:
+        model (`torch.nn.Sequential`): solution of the controlled equation $\mathbf{u}(t, \mathbf{x})$.
+
+        grid (`torch.Tensor`): m x n tensor of grid points, where m is the number of points in the domain and n is the number of NN inputs.
+
+        args (`List[Tuple[Union[int, List]]]`): list of arguments of the derivative operators.
+
+    Returns:
+        `torch.Tensor`: tensor of arguments for the control ANN.
+    '''
+    if diff_method is None:
+        diff_method = AutogradDeriv
+
+    differentiator = diff_method()
+    ctrl_inputs = [differentiator.take_derivative(u = model, args = grid,
+                                                  axes = arg[1], component = arg[0]).reshape(-1, 1) for arg in args]
+    if not isinstance(model, torch.nn.Sequential):
+        ctrl_inputs = [torch.from_numpy(inp).reshape((-1, 1)) for inp in ctrl_inputs]
+    ctrl_inputs = torch.cat(ctrl_inputs, dim = 1).float()
+    return ctrl_inputs
+
+@torch.no_grad()
+def eps_increment_diff(input_params: OrderedDict, loc: List[Union[str, Tuple[int]]],
+                       forward: bool = True, eps = 1e-4):
+    # Shifts a single parameter, addressed by loc = (key, indices...), by +eps.
+    # A backward call is assumed to follow a forward one, so it subtracts 2*eps,
+    # moving the parameter from (value + eps) to (value - eps) for central differences.
+    if forward:
+        input_params[loc[0]][tuple(loc[1:])] += eps
+    else:
+        input_params[loc[0]][tuple(loc[1:])] -= 2*eps
+    return input_params
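# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the patch. One central-difference
# gradient entry, the way the coordinate-descent loop presumably combines
# eps_increment_diff with a loss evaluation; loss_fn and the key 'w' are hypothetical.
import torch
from collections import OrderedDict
from epde.control.utils import eps_increment_diff

def loss_fn(params: OrderedDict) -> float:
    return float((params['w'] ** 2).sum())

params = OrderedDict([('w', torch.tensor([1.0, 2.0]))])
loc, eps = ('w', 0), 1e-4

params = eps_increment_diff(params, loc, forward=True, eps=eps)   # w[0] -> w[0] + eps
loss_plus = loss_fn(params)
params = eps_increment_diff(params, loc, forward=False, eps=eps)  # w[0] -> w[0] - eps
loss_minus = loss_fn(params)
with torch.no_grad():
    params[loc[0]][tuple(loc[1:])] += eps                         # restore the original value
grad_entry = (loss_plus - loss_minus) / (2 * eps)                 # ~= d loss / d w[0] = 2.0
# ---------------------------------------------------------------------------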
diff --git a/epde/eq_mo_objectives.py b/epde/eq_mo_objectives.py
index c199b83..762dc5a 100644
--- a/epde/eq_mo_objectives.py
+++ b/epde/eq_mo_objectives.py
@@ -28,8 +28,6 @@ def equation_fitness(system, equation_key):
         error : float. The value of the error metric.
     '''
-    # print(f'System, for which we evaluate fitness: {system.text_form}')
-    # print(f'For equation key {equation_key}, {system.vals[equation_key].fitness_calculated}')
     assert system.vals[equation_key].fitness_calculated, 'Trying to call fitness before its evaluation.'
     res = system.vals[equation_key].fitness_value
     return res
diff --git a/epde/evaluators.py b/epde/evaluators.py
index e7dc4f7..836a0cc 100644
--- a/epde/evaluators.py
+++ b/epde/evaluators.py
@@ -8,10 +8,10 @@
 import numpy as np
 import torch
 
-device = torch.device('cpu')
+# device = torch.device('cpu')
 
-from abc import ABC
-from typing import Callable, Union
+from abc import ABC, abstractmethod
+from typing import Callable, Union, List
 
 import epde.globals as global_var
 from epde.supplementary import factor_params_to_str
@@ -20,31 +20,46 @@ class EvaluatorTemplate(ABC):
     def __init__(self):
         pass
 
-    def __call__(self, factor, structural: bool = False, grids: list = None, **kwargs):
+    @abstractmethod
+    def __call__(self, factor, structural: bool = False, grids: list = None,
+                 torch_mode: bool = False, **kwargs):
         raise NotImplementedError(
             'Trying to call the method of an abstract class')
 
 class CustomEvaluator(EvaluatorTemplate):
-    def __init__(self, evaluation_functions: Union[Callable, dict],
-                 eval_fun_params_labels: Union[list, tuple, set], use_factors_grids: bool = True):
-        if isinstance(evaluation_functions, dict):
-            self.single_function_token = False
+    def __init__(self, evaluation_functions_np: Union[Callable, dict] = None,
+                 evaluation_functions_torch: Union[Callable, dict] = None,
+                 eval_fun_params_labels: Union[list, tuple, set] = ['power']):
+        self._evaluation_functions_np = evaluation_functions_np
+        self._evaluation_functions_torch = evaluation_functions_torch
+
+        if (evaluation_functions_np is None) and (evaluation_functions_torch is None):
+            raise ValueError('No evaluation function set in the initialization of CustomEvaluator.')
+
+        if isinstance(evaluation_functions_np, dict):
+            self._single_function_token = False
         else:
-            self.single_function_token = True
+            self._single_function_token = True
 
-        self.evaluation_functions = evaluation_functions
-        self.use_factors_grids = use_factors_grids
         self.eval_fun_params_labels = eval_fun_params_labels
 
-    def __call__(self, factor, structural: bool = False, grids: list = None, **kwargs):
-        if not self.single_function_token and factor.label not in self.evaluation_functions.keys():
+    def __call__(self, factor, structural: bool = False, func_args: List[Union[torch.Tensor, np.ndarray]] = None,
+                 torch_mode: bool = False, **kwargs):
+        torch_mode_explicit = torch_mode # TODO: rewrite; set unconditionally to avoid a NameError below
+        if not self._single_function_token and factor.label not in self._evaluation_functions_np.keys():
             raise KeyError(
                 'The label of the token function does not match keys of the evaluator functions')
-        if self.single_function_token:
-            evaluation_function = self.evaluation_functions
+        if func_args is not None:
+            if isinstance(func_args[0], np.ndarray) or self._evaluation_functions_torch is None:
+                funcs = self._evaluation_functions_np if self._single_function_token else self._evaluation_functions_np[factor.label]
+            elif isinstance(func_args[0], torch.Tensor) or self._evaluation_functions_np is None or torch_mode_explicit:
+                funcs = self._evaluation_functions_torch if self._single_function_token else self._evaluation_functions_torch[factor.label]
+        elif torch_mode:
+            funcs = self._evaluation_functions_torch if self._single_function_token else self._evaluation_functions_torch[factor.label]
         else:
-            evaluation_function = self.evaluation_functions[factor.label]
+            funcs = self._evaluation_functions_np if self._single_function_token else self._evaluation_functions_np[factor.label]
 
         eval_fun_kwargs = dict()
         for key in self.eval_fun_params_labels:
@@ -52,11 +67,11 @@ def
__call__(self, factor, structural: bool = False, grids: list = None, **kwarg if param_descr['name'] == key: eval_fun_kwargs[key] = factor.params[param_idx] - grid_function = np.vectorize(lambda args: evaluation_function(*args, **eval_fun_kwargs)) + grid_function = np.vectorize(lambda args: funcs(*args, **eval_fun_kwargs)) - if grids is None: + if func_args is None: new_grid = False - grids = factor.grids + func_args = factor.grids else: new_grid = True try: @@ -64,16 +79,16 @@ def __call__(self, factor, structural: bool = False, grids: list = None, **kwarg raise AttributeError self.indexes_vect except AttributeError: - self.indexes_vect = np.empty_like(grids[0], dtype=object) - for tensor_idx, _ in np.ndenumerate(grids[0]): - self.indexes_vect[tensor_idx] = tuple([grid[tensor_idx] - for grid in grids]) - + self.indexes_vect = np.empty_like(func_args[0], dtype=object) + for tensor_idx, _ in np.ndenumerate(func_args[0]): + self.indexes_vect[tensor_idx] = tuple([subarg[tensor_idx] + for subarg in func_args]) value = grid_function(self.indexes_vect) return value -def simple_function_evaluator(factor, structural: bool = False, grids=None, **kwargs): +def simple_function_evaluator(factor, structural: bool = False, grids=None, + torch_mode: bool = False, **kwargs): ''' Example of the evaluator of token values, that can be used for uploading values of stored functions from cache. Cases, when @@ -99,49 +114,70 @@ def simple_function_evaluator(factor, structural: bool = False, grids=None, **kw for param_idx, param_descr in factor.params_description.items(): if param_descr['name'] == 'power': power_param_idx = param_idx - + if grids is not None: - base_val = global_var.tensor_cache.get(factor.cache_label, structural=structural) - # original_grids = factor.grids - # factor_model = train_ann(grids = original_grids, data = base_val) - value = factor.predict_with_ann(grids) value = value**(factor.params[power_param_idx]) + return value else: if factor.params[power_param_idx] == 1: - value = global_var.tensor_cache.get(factor.cache_label, structural=structural) + value = global_var.tensor_cache.get(factor.cache_label, structural = structural, torch_mode = torch_mode) return value else: - value = global_var.tensor_cache.get(factor_params_to_str(factor, set_default_power=True, power_idx=power_param_idx), - structural=structural) + value = global_var.tensor_cache.get(factor_params_to_str(factor, set_default_power = True, + power_idx = power_param_idx), + structural = structural, torch_mode = torch_mode) value = value**(factor.params[power_param_idx]) return value -trig_eval_fun = {'cos': lambda *grids, **kwargs: np.cos(kwargs['freq'] * grids[int(kwargs['dim'])]) ** kwargs['power'], - 'sin': lambda *grids, **kwargs: np.sin(kwargs['freq'] * grids[int(kwargs['dim'])]) ** kwargs['power']} +sign_eval_fun_np = lambda *args, **kwargs: np.sign(args[0]) # If dim argument is needed here: int(kwargs['dim']) +sign_eval_fun_torch = lambda *args, **kwargs: torch.sign(args[0]) + +trig_eval_fun_np = {'cos': lambda *grids, **kwargs: np.cos(kwargs['freq'] * grids[int(kwargs['dim'])]) ** kwargs['power'], + 'sin': lambda *grids, **kwargs: np.sin(kwargs['freq'] * grids[int(kwargs['dim'])]) ** kwargs['power']} + +trig_eval_fun_torch = {'cos': lambda *grids, **kwargs: torch.cos(kwargs['freq'] * grids[int(kwargs['dim'])]) ** kwargs['power'], + 'sin': lambda *grids, **kwargs: torch.sin(kwargs['freq'] * grids[int(kwargs['dim'])]) ** kwargs['power']} -inverse_eval_fun = lambda *grids, **kwargs: np.power(grids[int(kwargs['dim'])], - 
kwargs['power'])
+inverse_eval_fun_np = lambda *grids, **kwargs: np.power(grids[int(kwargs['dim'])], - kwargs['power'])
+inverse_eval_fun_torch = lambda *grids, **kwargs: torch.pow(grids[int(kwargs['dim'])], - kwargs['power'])
 
-grid_eval_fun = lambda *grids, **kwargs: np.power(grids[int(kwargs['dim'])], kwargs['power'])
+grid_eval_fun_np = lambda *grids, **kwargs: np.power(grids[int(kwargs['dim'])], kwargs['power'])
+grid_eval_fun_torch = lambda *grids, **kwargs: torch.pow(grids[int(kwargs['dim'])], kwargs['power'])
 
-def phased_sine(*grids, **kwargs):
+def phased_sine_np(*grids, **kwargs):
     coordwise_elems = [kwargs['freq'][dim] * 2*np.pi*(grids[dim] + kwargs['phase'][dim])
                       for dim in range(len(grids))]
     return np.power(np.sin(np.sum(coordwise_elems, axis = 0)), kwargs['power'])
 
-def phased_sine_1d(*grids, **kwargs):
+def phased_sine_torch(*grids, **kwargs):
+    coordwise_elems = [kwargs['freq'][dim] * 2*torch.pi*(grids[dim] + kwargs['phase'][dim])
+                       for dim in range(len(grids))]
+    return torch.pow(torch.sin(torch.sum(torch.stack(coordwise_elems), dim = 0)), kwargs['power'])
+
+def phased_sine_1d_np(*grids, **kwargs):
     coordwise_elems = kwargs['freq'] * 2*np.pi*(grids[0] + kwargs['phase']/kwargs['freq'])
     return np.power(np.sin(coordwise_elems), kwargs['power'])
 
-def const_eval_fun(*grids, **kwargs):
+def phased_sine_1d_torch(*grids, **kwargs):
+    coordwise_elems = kwargs['freq'] * 2*torch.pi*(grids[0] + kwargs['phase']/kwargs['freq'])
+    return torch.pow(torch.sin(coordwise_elems), kwargs['power'])
+
+def const_eval_fun_np(*grids, **kwargs):
     return np.full_like(a=grids[0], fill_value=kwargs['value'])
 
-def const_grad_fun(*grids, **kwargs):
+def const_eval_fun_torch(*grids, **kwargs):
+    return torch.full_like(grids[0], fill_value=kwargs['value'])
+
+def const_grad_fun_np(*grids, **kwargs):
     return np.zeros_like(a=grids[0])
 
+def const_grad_fun_torch(*grids, **kwargs):
+    return torch.zeros_like(grids[0])
+
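# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the patch. A hypothetical 'exp'
# token defined in the paired *_np/*_torch convention used above and registered
# through CustomEvaluator, as the evaluators below do.
import numpy as np
import torch
from epde.evaluators import CustomEvaluator

exp_eval_fun_np = lambda *grids, **kwargs: np.exp(kwargs['freq'] * grids[int(kwargs['dim'])]) ** kwargs['power']
exp_eval_fun_torch = lambda *grids, **kwargs: torch.exp(kwargs['freq'] * grids[int(kwargs['dim'])]) ** kwargs['power']

exp_evaluator = CustomEvaluator(evaluation_functions_np=exp_eval_fun_np,
                                evaluation_functions_torch=exp_eval_fun_torch,
                                eval_fun_params_labels=['freq', 'dim', 'power'])
# ---------------------------------------------------------------------------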
 def get_velocity_common(*grids, **kwargs):
     a = [kwargs['p' + str(idx*3+1)] * grids[0]**2 + kwargs['p' + str(idx*3 + 2)] * grids[0] + kwargs['p' + str(idx*3 + 3)]
          for idx in range(5)]
     alpha = np.exp(a[0] * grids[1] + a[1]); beta = a[2] * grids[1]**2 + a[3] * grids[1] + a[4]
@@ -221,14 +257,30 @@ def vhef_grad_15(*grids, **kwargs):
                   vhef_grad_10, vhef_grad_11, vhef_grad_12, vhef_grad_13, vhef_grad_14, vhef_grad_15]
 
-phased_sine_evaluator = CustomEvaluator(phased_sine_1d, eval_fun_params_labels=['power', 'freq', 'phase'], use_factors_grids=True)
-trigonometric_evaluator = CustomEvaluator(trig_eval_fun, eval_fun_params_labels=['freq', 'dim', 'power'], use_factors_grids=True)
-grid_evaluator = CustomEvaluator(grid_eval_fun, eval_fun_params_labels=['dim', 'power'], use_factors_grids=True)
-
-inverse_function_evaluator = CustomEvaluator(inverse_eval_fun, eval_fun_params_labels=['dim', 'power'], use_factors_grids=True)
-
-const_evaluator = CustomEvaluator(const_eval_fun, ['power', 'value'])
-const_grad_evaluator = CustomEvaluator(const_grad_fun, ['power', 'value'])
+sign_evaluator = CustomEvaluator(evaluation_functions_np=sign_eval_fun_np,
+                                 evaluation_functions_torch=sign_eval_fun_torch,
+                                 eval_fun_params_labels = ['power', 'dim'])
+
+phased_sine_evaluator = CustomEvaluator(evaluation_functions_np = phased_sine_1d_np,
+                                        evaluation_functions_torch = phased_sine_1d_torch,
+                                        eval_fun_params_labels = ['power', 'freq', 'phase'])
+trigonometric_evaluator = CustomEvaluator(evaluation_functions_np = trig_eval_fun_np,
+                                          evaluation_functions_torch = trig_eval_fun_torch,
+                                          eval_fun_params_labels = ['freq', 'dim', 'power'])
+grid_evaluator = CustomEvaluator(evaluation_functions_np = grid_eval_fun_np,
+                                 evaluation_functions_torch = grid_eval_fun_torch,
+                                 eval_fun_params_labels = ['dim', 'power'])
+
+inverse_function_evaluator = CustomEvaluator(evaluation_functions_np = inverse_eval_fun_np,
+                                             evaluation_functions_torch = inverse_eval_fun_torch,
+                                             eval_fun_params_labels = ['dim', 'power'])
+
+const_evaluator = CustomEvaluator(evaluation_functions_np = const_eval_fun_np,
+                                  evaluation_functions_torch = const_eval_fun_torch,
+                                  eval_fun_params_labels = ['power', 'value'])
+const_grad_evaluator = CustomEvaluator(evaluation_functions_np = const_grad_fun_np,
+                                       evaluation_functions_torch = const_grad_fun_torch,
+                                       eval_fun_params_labels = ['power', 'value'])
 
 velocity_evaluator = CustomEvaluator(velocity_heating_eval_fun, ['p' + str(idx+1) for idx in range(15)])
 velocity_grad_evaluators = [CustomEvaluator(component, ['p' + str(idx+1) for idx in range(15)])
diff --git a/epde/globals.py b/epde/globals.py
index 825ae9e..4c108c4 100644
--- a/epde/globals.py
+++ b/epde/globals.py
@@ -8,11 +8,18 @@
 from dataclasses import dataclass
 import warnings
 
+from typing import List, Union
+
+import numpy as np
+import torch
+# device = torch.device('cpu') # TODO: make system-agnostic approach
 
 from epde.cache.cache import Cache
+from epde.cache.ctrl_cache import ControlNNContainer
+from epde.supplementary import create_solution_net, AutogradDeriv
 
-def init_caches(set_grids: bool = False):
+def init_caches(set_grids: bool = False, device = 'cpu'):
     """
     Initialization global variables for keeping input data, values of grid and useful tensors such as evaluated terms
@@ -23,10 +30,10 @@
         None
     """
     global tensor_cache, grid_cache, initial_data_cache
-    tensor_cache = Cache()
-    initial_data_cache = Cache()
+    tensor_cache = Cache(device = device)
+    initial_data_cache = Cache(device = device)
     if set_grids:
-        grid_cache = Cache()
+        grid_cache = Cache(device = device)
     else:
         grid_cache = None
@@ -112,3 +119,99 @@ def init_verbose(plot_DE_solutions : bool = False, show_iter_idx : bool = True,
     warnings.filterwarnings("ignore")
     verbose = VerboseManager(plot_DE_solutions, show_iter_idx, show_iter_fitness,
                              show_iter_stats, show_ann_loss, show_warnings)
+
+def reset_control_nn(n_control: int = 1, ann: torch.nn.Sequential = None,
+                     ctrl_args: list = [(0, [None,]),], device: str = 'cpu'):
+    '''
+    Workaround: the control ANN is stored as a module-level global instead of
+    being linked to the token family directly.
+    '''
+
+    global control_nn
+    control_nn = ControlNNContainer(output_num = n_control, args = ctrl_args,
+                                    net = ann, device = device)
+
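# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the patch. A plausible setup
# sequence for the module-level globals introduced here; the exact ordering and
# argument values are assumptions drawn from the signatures in this hunk.
import epde.globals as global_var

global_var.init_caches(set_grids=True, device='cpu')   # tensor/grid/initial-data caches
global_var.init_verbose(show_iter_idx=True)            # verbosity manager
global_var.reset_control_nn(n_control=1,               # control ANN container
                            ctrl_args=[(0, [None,]),],
                            device='cpu')
# ---------------------------------------------------------------------------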
+def reset_data_repr_nn(data: List[np.ndarray], grids: List[np.ndarray], train: bool = True,
+                       derivs: List[Union[int, List, Union[np.ndarray]]] = None,
+                       penalised_derivs: List[Union[int, List]] = None,
+                       epochs_max=1e5, predefined_ann: torch.nn.Sequential = None,
+                       batch_frac=0.5, learning_rate=1e-4, device = 'cpu',
+                       use_fourier: bool = True, fourier_params: dict = {'L' : [4,], 'M' : [3,]}):
+    '''
+    Represent the data with an ANN, suitable to be used as the initial guess of the candidate equation
+    solutions during the equation search, employing the solver-based fitness function.
+
+    Possible addition: add optimization in Sobolev space, using passed derivatives, incl. higher orders.
+    '''
+
+    global solution_guess_nn
+
+    if predefined_ann is None:
+        model = create_solution_net(equations_num=len(data), domain_dim=len(grids), device = device,
+                                    use_fourier=use_fourier, fourier_params=fourier_params)
+    else:
+        model = predefined_ann
+
+    if train:
+        model.to(device)
+
+        grids_tr = torch.from_numpy(np.array([subgrid.reshape(-1) for subgrid in grids])).float().T
+        data_tr = torch.from_numpy(np.array([data_var.reshape(-1) for data_var in data])).float().T
+        grids_tr = grids_tr.to(device)
+        data_tr = data_tr.to(device)
+
+        batch_size = int(data[0].size * batch_frac)
+        optimizer = torch.optim.Adam(model.parameters(), lr = learning_rate)
+        deriv_calc = AutogradDeriv()
+
+        t = 0
+        min_loss = np.inf
+        loss_mean = np.inf
+        print(f'Training NN to represent data for {epochs_max} epochs')
+        while loss_mean > 1e-6 and t < epochs_max:
+            permutation = torch.randperm(grids_tr.size()[0])
+            loss_list = []
+
+            for i in range(0, grids_tr.size()[0], batch_size):
+                optimizer.zero_grad()
+
+                indices = permutation[i:i+batch_size]
+                batch_x, batch_y = grids_tr[indices], data_tr[indices]
+
+                loss = torch.mean(torch.abs(batch_y - model(batch_x)))
+                if derivs is not None:
+                    # Sobolev-style supervision: match ANN derivatives to the passed derivative tensors.
+                    for var_idx, deriv_axes, deriv_tensor in derivs:
+                        deriv_autograd = deriv_calc.take_derivative(model, batch_x, axes = deriv_axes, component = var_idx)
+                        batch_derivs = torch.from_numpy(deriv_tensor)[torch.unravel_index(indices,
+                                                                                          deriv_tensor.shape)].reshape_as(deriv_autograd).to(device)
+                        loss += 1e2 * torch.mean(torch.abs(batch_derivs - deriv_autograd))
+
+                if penalised_derivs is not None:
+                    # Penalise the magnitude of the specified (typically higher-order) derivatives.
+                    for var_idx, deriv_axes in penalised_derivs:
+                        deriv_autograd = deriv_calc.take_derivative(model, batch_x, axes = deriv_axes, component = var_idx)
+                        loss += 1e3 * torch.mean(torch.abs(deriv_autograd))
+
+                loss.backward()
+                optimizer.step()
+                loss_list.append(loss.item())
+            loss_mean = np.mean(loss_list)
+            if loss_mean < min_loss:
+                # Snapshot the weights: a plain `best_model = model` would only alias the live model.
+                best_state = {key: tensor.detach().clone() for key, tensor in model.state_dict().items()}
+                min_loss = loss_mean
+            t += 1
+        model.load_state_dict(best_state)
+        print(f'Min loss {min_loss}; losses of the last epoch: {loss_list}')
+        solution_guess_nn = model
+    else:
+        solution_guess_nn = model
\ No newline at end of file
diff --git a/epde/integrate/__init__.py b/epde/integrate/__init__.py
new file mode 100644
index 0000000..c72a083
--- /dev/null
+++ b/epde/integrate/__init__.py
@@ -0,0 +1,4 @@
+from .interface import SystemSolverInterface
+from .bop import BOPElement, BoundaryConditions
+from .solver_integration import SolverAdapter
+from .odeint_integration import OdeintAdapter
\ No newline at end of file
diff --git a/epde/integrate/bop.py b/epde/integrate/bop.py
new file mode 100644
index 0000000..2302f23
--- /dev/null
+++ b/epde/integrate/bop.py
@@ -0,0 +1,276 @@
+from typing import Union, List, Dict
+from types import FunctionType
+from functools import singledispatch
+
+import numpy as np
+import torch
+
+import epde.globals as global_var
+from epde.structure.main_structures import SoEq
+
+VAL_TYPES = Union[FunctionType, int, float, torch.Tensor, np.ndarray]
+
+
+def get_max_deriv_orders(system_sf: List[Union[Dict[str, Dict]]], variables: List[str] = ['u',]) -> dict:
+    def count_factor_order(factor_code, deriv_ax):
+ if factor_code is None or isinstance(factor_code, tuple): + return 0 + else: + if isinstance(factor_code, list): + return factor_code.count(deriv_ax) + elif isinstance(factor_code, int): + return 1 if factor_code == deriv_ax else 0 + else: + raise TypeError(f'Incorrect type of the input. Got {type(factor_code), factor_code}, expecting int or list') + + @singledispatch + def get_equation_requirements(equation_sf, variables=['u',]): + raise NotImplementedError( + 'Single-dispatch called in generalized form') + + @get_equation_requirements.register + def _(equation_sf: dict, variables=['u',]) -> dict: # dict = {u: 0}): + dim = global_var.grid_cache.get('0').ndim + if len(variables) == 1: + var_max_orders = np.zeros(dim) + for term in equation_sf.values(): + if isinstance(term['pow'], list): + for deriv_factor in term['term']: + orders = np.array([count_factor_order(deriv_factor, ax) for ax + in np.arange(dim)]) + var_max_orders = np.maximum(var_max_orders, orders) + else: + orders = np.array([count_factor_order(term['term'], ax) for ax + in np.arange(dim)]) + var_max_orders = np.maximum(var_max_orders, orders) + return {variables[0]: var_max_orders} + else: + var_max_orders = {var_key: np.zeros(dim) for var_key in variables} + for term_key, symb_form in equation_sf.items(): + if isinstance(symb_form['var'], list): + assert len(symb_form['term']) == len(symb_form['var']) + for factor_idx, deriv_factor in enumerate(symb_form['term']): + var_orders = np.array([count_factor_order(deriv_factor, ax) for ax + in np.arange(dim)]) + if isinstance(symb_form['var'][factor_idx], int): + var_key = symb_form['var'][factor_idx] #- 1 + else: + var_key = 0 + var_orders = 0 # Such tokens do not increase order of the DE + var_max_orders[variables[var_key]] = np.maximum(var_max_orders[variables[var_key]], + var_orders) + elif isinstance(symb_form['var'], int): + raise NotImplementedError() + assert len(symb_form['term']) == 1 + for factor_idx, factor in enumerate([count_factor_order(symb_form['term'], ax) for ax + in np.arange(dim)]): + var_orders = np.array([count_factor_order(deriv_factor, ax) for ax + in np.arange(dim)]) + var_key = symb_form['var'][factor_idx] + var_max_orders[var_key] = np.maximum(var_max_orders[var_key], var_orders) + return var_max_orders + + @get_equation_requirements.register + def _(equation_sf: list, variables=['u',]): + raise NotImplementedError( + 'TODO: add equation list form processing') + + eq_forms = [] + for equation_form in system_sf: + eq_forms.append(get_equation_requirements(equation_form, variables)) + + max_orders = {var: np.maximum.accumulate([eq_list[var] for eq_list in eq_forms])[-1] + for var in variables} # TODO + return max_orders + +class BOPElement(object): + def __init__(self, axis: int, key: str, coeff: float = 1., term: list = [None], + power: Union[Union[List[int], int]] = 1, var: Union[List[int], int] = 1, + rel_location: float = 0., device = 'cpu'): + self.axis = axis + self.key = key + self.coefficient = coeff + self.term = term + self.power = power + self.variables = var + self.location = rel_location + self.grid = None + + self.status = {'boundary_location_set': False, + 'boundary_values_set': False} + + self._device = device + + def set_grid(self, grid: torch.Tensor): + self.grid = grid + self.status['boundary_location_set'] = True + + @property + def operator_form(self): + form = { + 'coeff': self.coefficient, + self.key: self.term, + 'pow': self.power, + 'var': self.variables + } + return self.key, form + + @property + def values(self): + if 
isinstance(self._values, FunctionType):
+            assert self.grid_set, 'Trying to evaluate a variable coefficient without a proper grid.'
+            res = self._values(self.grids)
+            assert res.shape == self.grids[0].shape
+            return torch.from_numpy(res).to(self._device)
+        else:
+            return self._values
+
+    @values.setter
+    def values(self, vals):
+        if isinstance(vals, (FunctionType, int, float, torch.Tensor)):
+            self._values = vals
+            self.vals_set = True
+        elif isinstance(vals, np.ndarray):
+            self._values = torch.from_numpy(vals).to(self._device)
+            self.vals_set = True
+        else:
+            raise TypeError(
+                f'Incorrect type of coefficients. Must be a type from list {VAL_TYPES}.')
+
+    def __call__(self, values: VAL_TYPES = None) -> dict:
+        if not self.vals_set and values is not None:
+            self.values = values
+            self.status['boundary_values_set'] = True
+        elif not self.vals_set and values is None:
+            raise ValueError('No values passed into the BOP.')
+        if self.grid is not None:
+            boundary = self.grid
+        elif self.grid is None and self.location is not None:
+            _, all_grids = global_var.grid_cache.get_all(mode = 'torch')
+
+            abs_loc = self.location * all_grids[0].shape[self.axis]
+            if all_grids[0].ndim > 1:
+                boundary = np.array(all_grids[:self.axis] + all_grids[self.axis+1:])
+                if isinstance(values, FunctionType):
+                    raise NotImplementedError # TODO: evaluation of BCs passed as functions or lambdas
+                boundary = torch.from_numpy(np.expand_dims(boundary, axis=self.axis)).to(self._device).float()
+
+                boundary = torch.cartesian_prod(boundary,
+                                                torch.from_numpy(np.array([abs_loc,], dtype=np.float64)).to(self._device)).float()
+                boundary = torch.moveaxis(boundary, source=0, destination=self.axis).resize()
+            else:
+                boundary = torch.from_numpy(np.array([[abs_loc,],])).to(self._device).float() # TODO: work from here
+
+        elif self.grid is None and self.location is None:
+            raise ValueError('No location passed into the BOP.')
+
+        form = self.operator_form
+        boundary_operator = {form[0]: form[1]}
+
+        boundary_value = self.values
+
+        return {'bnd_loc' : boundary.to(self._device), 'bnd_op' : boundary_operator,
+                'bnd_val' : boundary_value.to(self._device),
+                'variables' : self.variables, 'type' : 'operator'}
+
+class PregenBOperator(object):
+    def __init__(self, system: SoEq, system_of_equation_solver_form: list): #, device = 'cpu'
+        self.system = system
+        self.equation_sf = [eq for eq in system_of_equation_solver_form]
+        self.variables = list(system.vars_to_describe)
+
+    def demonstrate_required_ords(self):
+        linked_ords = list(zip([eq.main_var_to_explain for eq in self.system],
+                               self.max_deriv_orders))
+        return linked_ords
+
+    @property
+    def conditions(self):
+        return self._bconds
+
+    @conditions.setter
+    def conditions(self, conds: List[BOPElement]):
+        self._bconds = []
+        if len(conds) != int(sum([value.sum() for value in self.max_deriv_orders.values()])):
+            raise ValueError(
+                'Number of passed boundary conditions does not match requirements of the system.')
+        for condition in conds:
+            if isinstance(condition, BOPElement):
+                self._bconds.append(condition())
+            else:
+                print('condition is ', type(condition), condition)
+                raise NotImplementedError(
+                    'In-place initialization of boundary operator has not been implemented yet.')
+
+    @property
+    def max_deriv_orders(self):
+        return get_max_deriv_orders(self.equation_sf, self.variables)
+
+    def generate_default_bc(self, vals: Union[np.ndarray, dict] = None, grids: List[np.ndarray] = None,
+                            allow_high_ords: bool = False, required_bc_ord: List[int] = None):
+        # Implement allow_high_ords - selection of derivatives from
+ if required_bc_ord is None: + required_bc_ord = self.max_deriv_orders + assert set(self.variables) == set(required_bc_ord.keys()), 'Some conditions miss required orders.' + + grid_cache = global_var.initial_data_cache + tensor_cache = global_var.initial_data_cache + + if vals is None: + val_keys = {key: (key, (1.0,)) for key in self.variables} + + if grids is None: + _, grids = grid_cache.get_all(mode = 'torch') + + device = global_var.grid_cache._device + # assert self._device + device_on_cpu = (device == 'cpu') + relative_bc_location = {0: (), 1: (0,), 2: (0, 1), + 3: (0., 0.5, 1.), 4: (0., 1/3., 2/3., 1.)} + + bconds = [] + tensor_shape = grids[0].shape + + def get_boundary_ind(tensor_shape, axis, rel_loc): + return tuple(np.meshgrid(*[np.arange(shape) if dim_idx != axis else min(int(rel_loc * shape), shape-1) + for dim_idx, shape in enumerate(tensor_shape)], indexing='ij')) + + for var_idx, variable in enumerate(self.variables): + for ax_idx, ax_ord in enumerate(required_bc_ord[variable]): + for loc in relative_bc_location[ax_ord]: + indexes = get_boundary_ind(tensor_shape, ax_idx, rel_loc=loc) + + if device_on_cpu: + coords = np.array([grids[idx][indexes].detach().numpy() for idx in np.arange(len(tensor_shape))]).T + else: + coords = np.array([grids[idx][indexes].detach().cpu().numpy() + for idx in np.arange(len(tensor_shape))]).T + if coords.ndim > 2: + coords = coords.squeeze() + + if vals is None: + bc_values = tensor_cache.get(val_keys[variable])[indexes] + else: + bc_values = vals[indexes] + + bc_values = np.expand_dims(bc_values, axis=0).T + coords = torch.from_numpy(coords).to(device).float() + + bc_values = torch.from_numpy(bc_values).to(device).float() # TODO: set devices for all torch objs + operator = BOPElement(axis=ax_idx, key=variable, coeff=1, term=[None], + power=1, var=var_idx, rel_location=loc, device=device) + operator.set_grid(grid=coords) + operator.values = bc_values + bconds.append(operator) + print('Types of conds:', [type(cond) for cond in bconds]) + self.conditions = bconds + + +class BoundaryConditions(object): + def __init__(self, grids=None, partial_operators: dict = []): + self.grids_set = (grids is not None) + if grids is not None: + self.grids = grids + self.operators = partial_operators + + def form_operator(self): + return [list(bcond()) for bcond in self.operators.values()] diff --git a/epde/integrate/interface.py b/epde/integrate/interface.py new file mode 100644 index 0000000..373417f --- /dev/null +++ b/epde/integrate/interface.py @@ -0,0 +1,177 @@ +from typing import List +from functools import singledispatchmethod + +import numpy as np +import torch + +from epde.evaluators import simple_function_evaluator +from epde.structure.main_structures import SoEq +import epde.globals as global_var + +def make_eval_func(eval_func, eval_func_kwargs): + return lambda *args: eval_func(*args, **eval_func_kwargs) + +class SystemSolverInterface(object): + def __init__(self, system_to_adapt: SoEq, coeff_tol: float = 1.e-9, device = 'cpu'): + self.variables = list(system_to_adapt.vars_to_describe) + self.adaptee = system_to_adapt + self.grids = None + self.coeff_tol = coeff_tol + + self._device = device + + @staticmethod + def _term_solver_form(term, grids, default_domain, variables: List[str] = ['u',], + device = 'cpu') -> dict: + deriv_orders = [] + deriv_powers = [] + deriv_vars = [] + derivs_detected = False + + try: + coeff_tensor = torch.ones_like(grids[0]).to(device) + + except KeyError: + raise NotImplementedError('No cache implemented') + for factor in 
term.structure:
+            if factor.is_deriv:
+                for param_idx, param_descr in factor.params_description.items():
+                    if param_descr['name'] == 'power':
+                        power_param_idx = param_idx
+                deriv_orders.append(factor.deriv_code)
+                if factor.evaluator._evaluator != simple_function_evaluator:
+                    if factor.evaluator._evaluator._single_function_token:
+                        eval_func = factor.evaluator._evaluator._evaluation_functions_torch
+                    else:
+                        eval_func = factor.evaluator._evaluator._evaluation_functions_torch[factor.label]
+                    if not isinstance(eval_func, torch.nn.Sequential):
+                        eval_func_kwargs = dict()
+                        for key in factor.evaluator._evaluator.eval_fun_params_labels:
+                            for param_idx, param_descr in factor.params_description.items():
+                                if param_descr['name'] == key:
+                                    eval_func_kwargs[key] = factor.params[param_idx]
+                        lbd_eval_func = make_eval_func(eval_func, eval_func_kwargs)
+                        deriv_powers.append(lbd_eval_func)
+                else:
+                    deriv_powers.append(factor.params[power_param_idx])
+                try:
+                    if isinstance(factor.variable, str):
+                        cur_deriv_var = variables.index(factor.variable)
+                    elif isinstance(factor.variable, int) or (isinstance(factor.variable, (list, tuple)) and
+                                                              isinstance(factor.variable[0], int)):
+                        cur_deriv_var = factor.variable
+                    elif isinstance(factor.variable, (list, tuple)) and isinstance(factor.variable[0], str):
+                        cur_deriv_var = [variables.index(var_elem) for var_elem in factor.variable]
+                except ValueError:
+                    raise ValueError(
+                        f'Derivative variable {factor.variable} is not in the list of described variables {variables}.')
+                derivs_detected = True
+
+                deriv_vars.append(cur_deriv_var)
+            else:
+                grid_arg = None if default_domain else grids
+                coeff_tensor = coeff_tensor * factor.evaluate(grids=grid_arg, torch_mode = True).to(device)
+        if not derivs_detected:
+            deriv_powers = [0,]
+            deriv_orders = [[None,],]
+        if len(deriv_powers) == 1:
+            deriv_powers = [deriv_powers[0],]
+            deriv_orders = [deriv_orders[0],]
+
+        if deriv_vars == []:
+            if isinstance(deriv_powers, int) and deriv_powers != 0:
+                raise Exception('Something went wrong with parsing an equation for solver')
+            else:
+                deriv_vars = [0,]
+
+        if torch.all(torch.isclose(coeff_tensor.reshape(-1)[0], coeff_tensor.reshape(-1))):
+            coeff_tensor = coeff_tensor.reshape(-1)[0].item()
+
+        res = {'coeff': coeff_tensor,
+               'term': deriv_orders,
+               'pow': deriv_powers,
+               'var': deriv_vars}
+
+        return res
+
+    @singledispatchmethod
+    def set_boundary_operator(self, operator_info):
+        raise NotImplementedError()
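# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the patch. The dictionary
# format assembled above, shown for a hypothetical term c * du/dx0 in a
# single-variable problem.
example_term_form = {
    'coeff': 1.0,        # scalar, a tensor of per-point values, or a callable
    'term': [[0,],],     # differentiation axes per factor: here one d/dx0
    'pow': [1,],         # powers (or callables wrapping token functions) per factor
    'var': [0,],         # index of the dependent variable each factor acts on
}
# ---------------------------------------------------------------------------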
+    def _equation_solver_form(self, equation, variables, grids=None, mode = 'NN') -> dict:
+        assert mode in ['NN', 'autograd', 'mat'], 'Incorrect mode passed. Form available only \
+            for "NN", "autograd" and "mat" methods'
+
+        def adjust_shape(tensor, mode = 'NN'):
+            if mode in ['NN', 'autograd']:
+                return torch.flatten(tensor).unsqueeze(1).type(torch.FloatTensor)
+            elif mode == 'mat':
+                return tensor.type(torch.FloatTensor)
+
+        _solver_form = {}
+        if grids is None:
+            grids = self.grids
+            default_domain = True
+        else:
+            if isinstance(grids[0], np.ndarray):
+                grids = [torch.from_numpy(subgrid).to(self._device) for subgrid in grids]
+            default_domain = False
+
+        for term_idx, term in enumerate(equation.structure):
+            if term_idx != equation.target_idx:
+                if term_idx < equation.target_idx:
+                    weight = equation.weights_final[term_idx]
+                else:
+                    weight = equation.weights_final[term_idx-1]
+                if not np.isclose(weight, 0, rtol = self.coeff_tol):
+                    _solver_form[term.name] = self._term_solver_form(term, grids, default_domain, variables)
+                    _solver_form[term.name]['coeff'] = _solver_form[term.name]['coeff'] * weight
+                    if isinstance(_solver_form[term.name]['coeff'], torch.Tensor):
+                        _solver_form[term.name]['coeff'] = adjust_shape(_solver_form[term.name]['coeff'], mode = mode)
+
+        free_coeff_weight = equation.weights_final[-1]
+        free_coeff_term = {'coeff': free_coeff_weight,
+                           'term': [None],
+                           'pow': 0,
+                           'var': [0,]}
+        _solver_form['C'] = free_coeff_term
+
+        target_weight = -1
+        target_form = self._term_solver_form(equation.structure[equation.target_idx], grids, default_domain, variables)
+        target_form['coeff'] = target_form['coeff'] * target_weight
+
+        _solver_form[equation.structure[equation.target_idx].name] = target_form
+
+        return _solver_form
+
+    def use_grids(self, grids=None):
+        if grids is None and self.grids is None:
+            _, self.grids = global_var.grid_cache.get_all(mode = 'torch')
+        elif grids is not None:
+            if len(grids) != len(global_var.grid_cache.get_all(mode = 'torch')[1]):
+                raise ValueError(
+                    'Number of passed grids does not match the problem')
+            if isinstance(grids[0], np.ndarray):
+                grids = [torch.from_numpy(subgrid).to(self._device) for subgrid in grids]
+            self.grids = grids
+
+    def form(self, grids=None, mode = 'NN'):
+        self.use_grids(grids=grids)
+        equation_forms = []
+
+        for equation in self.adaptee.vals:
+            equation_forms.append((equation.main_var_to_explain,
+                                   self._equation_solver_form(equation, variables=self.variables,
+                                                              grids=grids, mode = mode)))
+        return equation_forms
diff --git a/epde/integrate/odeint_integration.py b/epde/integrate/odeint_integration.py
new file mode 100644
index 0000000..05528a1
--- /dev/null
+++ b/epde/integrate/odeint_integration.py
@@ -0,0 +1,291 @@
+from typing import Union, List, Dict, Tuple, Callable
+import copy
+from functools import reduce
+from warnings import warn
+
+import torch
+import numpy as np
+from scipy.integrate import ode, solve_ivp
+
+from epde.solver.data import Domain, Conditions
+from epde.structure.main_structures import SoEq
+from epde.integrate.bop import PregenBOperator, BOPElement, get_max_deriv_orders
+from epde.integrate.interface import SystemSolverInterface
+
+def get_terms_der_order(equation: Dict, variable_idx: int) -> np.ndarray:
+    '''
+    Get the highest orders of the ``variable_idx``-th variable
derivative in the equation terms. + ''' + term_max_ord = np.zeros(len(equation)) + for term_idx, term_dict in enumerate(equation.values()): + + if isinstance(term_dict['var'], list) and len(term_dict['var']) > 1: + max_ord = 0 + for arg_idx, deriv_ord in enumerate(term_dict['term']): + if isinstance(term_dict['pow'][arg_idx], (int, float)) and term_dict['var'][arg_idx] == variable_idx: + max_ord = max(max_ord, len([var for var in deriv_ord if var is not None])) + term_max_ord[term_idx] = max_ord + elif isinstance(term_dict['var'], int): + if isinstance(term_dict['pow'], (int, float)) and term_dict['var'] == variable_idx: + term_max_ord[term_idx] = max(0, len([var for var in term_dict['term'] if var is not None])) + elif isinstance(term_dict['var'], list) and len(term_dict['var']) == 1: + if isinstance(term_dict['var'][0], (int, float)): + term_var = term_dict['var'][0] + elif isinstance(term_dict['var'][0], (list, tuple)): + term_var = term_dict['var'][0][0] + if (isinstance(term_dict['pow'], (int, float)) or (isinstance(term_dict['pow'], (list, tuple)) + and len(term_dict['pow']) == 1)) and term_var == variable_idx: + term_max_ord[term_idx] = max(0, len([var for var in term_dict['term'] if var is not None and var != [None,]])) + pass + + return term_max_ord + +def get_higher_order_coeff(equation: Dict, orders: np.ndarray, var: int) -> Tuple[List]: + def transform_term(term: Dict, deriv_key: list, var: int) -> Dict: + term_filtered = copy.deepcopy(term) + if (isinstance(term['var'], int) and term['var'] == var) or (isinstance(term['var'], list) + and len(term['var']) == 1 and term['var'][0] == var): + term_filtered['term'] = [None,] + term_filtered['pow'] = 0 + else: + term_idx = [der_var for idx, der_var in enumerate(term_filtered['term']) + if der_var == deriv_key and term_filtered['pow'][idx] == var][0] + term_filtered['term'][term_idx] = [None,] + term_filtered['pow'][term_idx] = 0 + return term_filtered + + denom_terms = [] + numer_terms = [] + for term_idx, term in enumerate(equation.values()): + if orders[term_idx] == np.max(orders): + denom_terms.append(transform_term(term, deriv_key=[0,]*int(np.max(orders)), var=var)) + else: + numer_terms.append(term) + return [denom_terms, numer_terms] + +def get_eq_order(equation, variables: List[str]): + eq_var = 0; eq_orders = np.zeros(len(equation)) + for var_idx in range(len(variables)): + orders = get_terms_der_order(equation=equation, variable_idx=var_idx) + if np.max(orders) > np.max(eq_orders): + eq_var = var_idx; eq_orders = orders + return eq_var, eq_orders + +def replace_operator(term: Dict, variables: List): + ''' + + Variables have to be in form of [(0, [None]), (0, [0,]), (0, [0, 0]), (0, [0, 0, 0]), (1, [None,]), ... ] + where the list elements are factors, taken as derivatives: (variable, differentiations), and the index in list + matches the index of dynamics operator output. 
+ + ''' + term_ = copy.deepcopy(term) + if isinstance(term_['var'], list) and len(term_['var']) > 1: + for arg_idx, deriv_ord in enumerate(term_['term']): + if isinstance(term_['var'][arg_idx], (tuple, list)): + continue + term_['var'][arg_idx] = variables.index((term_['var'][arg_idx], deriv_ord)) + term_['term'][arg_idx] = [None,] + elif isinstance(term['var'], int) or (isinstance(term_['var'], list) and len(term_['var']) == 1): + if isinstance(term['var'], int): + term_var = term_['var'] + else: + term_var = term_['var'][0] + if isinstance(term['pow'], (int, float)): + term_['var'] = variables.index((term_var, term_['term'])) + term_['term'] = [None,] + return term_ + +class ImplicitEquation(object): + def __init__(self, system: List, grid: np.ndarray, variables: List[str]): + self.grid_dict = grid + + # print(f'Solved system is {system}') + + self._dynamics_operators = [] + self._var_order = [] + self._vars_with_eqs = {} + + for var, order in [get_eq_order(equation, variables) for equation in system]: + self._var_order.extend([(var, [None,])] + [(var, [0,]*(idx+1)) for idx in range(int(np.max(order))-1)]) + if len(self._vars_with_eqs) == 0: + self._vars_with_eqs[int(np.max(order)) - 1] = (var, order) + else: + self._vars_with_eqs[list(self._vars_with_eqs.keys())[-1] + int(np.max(order))] = (var, order) + + for var_idx, var in enumerate(self._var_order): + if var_idx in self._vars_with_eqs.keys(): + operator = get_higher_order_coeff(equation = system[self._vars_with_eqs[var_idx][0]], + orders = self._vars_with_eqs[var_idx][1], + var = self._vars_with_eqs[var_idx][0]) + operator[0] = [replace_operator(denom_term, self._var_order) for denom_term in operator[0]] + operator[1] = [replace_operator(numer_term, self._var_order) for numer_term in operator[1]] + else: + operator = [None, self.create_first_ord_eq(var_idx + 1)] + self._dynamics_operators.append(operator) + + def parse_cond(self, conditions: List[Union[BOPElement, dict]]): + cond_val = np.full(shape = len(self._dynamics_operators), fill_value=np.inf) + for cond in conditions: + if isinstance(cond, BOPElement): + assert isinstance(cond.variables, int), 'Boundary operator has to contain only a single variable.' + try: + var = self._var_order.index((cond.variables, cond.axis)) + except ValueError: + print(f'Missing {cond.variables, cond.axis} from the list of variables {self._var_order}') + raise RuntimeError() + cond_val[var] = cond.values + else: + op_form = list(cond['bnd_op'].values())[0] + term_key = [op_key for op_key in list(op_form.keys()) if op_key not in ['coeff', 'pow', 'var']][0] + try: + # print(f'term key {term_key}') + var = self._var_order.index((op_form['var'], op_form[term_key])) + except ValueError: + print(f'Missing {(op_form["var"], op_form[term_key])} from the list of variables {self._var_order}') + raise RuntimeError() + cond_val[var] = cond['bnd_val'] + + assert np.sum(np.inf == cond_val) == 0, 'Not enough initial conditions were passed.' 
+        return cond_val
+
+    def __call__(self, t, y):
+        values = np.empty(len(self._dynamics_operators))
+        for idx, operator in enumerate(self._dynamics_operators):
+            if operator[0] is None:
+                denom = 1
+            else:
+                denom = [self.term_callable(term, t, y) for term in operator[0]]
+            if np.isclose(np.sum(denom), 0):
+                raise ZeroDivisionError('Denominator in the dynamics operator is close to zero.')
+            numerator = [self.term_callable(term, t, y) for term in operator[1]]
+            values[idx] = -1*np.sum(numerator)/np.sum(denom)
+        return values
+
+    @property
+    def grid_dict(self):
+        return self._grid_rounded
+
+    @grid_dict.setter
+    def grid_dict(self, grid_points):
+        self._grid_step = grid_points[1] - grid_points[0]
+        digits = np.floor(np.log10(self._grid_step/2.)-1)
+        self._grid_rounded = {np.round(grid_val, -int(digits)): idx
+                              for idx, grid_val in np.ndenumerate(grid_points)}
+
+    def create_first_ord_eq(self, var: int) -> List[Tuple]:
+        '''
+        Example of order: np.array([3., 0., 0.]) for a third-order equation.
+        '''
+        return [{'coeff' : -1.,
+                 'term' : [None,],
+                 'pow' : 1,
+                 'var' : var},]
+
+    def merge_coeff(self, coeff: np.ndarray, t: float):
+        try:
+            return coeff[self.grid_dict[t]]
+        except KeyError:
+            # Linear interpolation of the coefficient between the enclosing grid nodes.
+            for grid_loc, grid_idx in self.grid_dict.items():
+                if grid_loc < t and grid_loc + self._grid_step > t:
+                    left_loc, right_loc = grid_loc, grid_loc + self._grid_step
+                    left_idx, right_idx = grid_idx[0], grid_idx[0] + 1
+                    break
+            val = coeff[left_idx] + (t - left_loc) / (right_loc - left_loc) * (coeff[right_idx] - coeff[left_idx])
+            return val
+
+    def term_callable(self, term: Dict, t, y):
+        def call_ann_token(token_nn: torch.nn.Sequential, arguments, t: float, y: np.ndarray):
+            # The ANN does not explicitly depend on time; its inputs are state components.
+            arg_vals = y[np.atleast_1d(arguments)]
+            return token_nn(torch.from_numpy(arg_vals).reshape((-1, 1)).float()).detach().numpy()
+
+        # Check torch.nn.Sequential before the generic Callable branch: a Sequential is callable too.
+        if isinstance(term['coeff'], torch.nn.Sequential):
+            k = term['coeff'](torch.tensor([[t,],]).float())
+        elif isinstance(term['coeff'], Callable):
+            k = term['coeff'](t)
+        elif isinstance(term['coeff'], np.ndarray):
+            k = self.merge_coeff(term['coeff'], t)
+        else:
+            k = term['coeff']
+
+        if not isinstance(term['var'], (list, tuple)) or len(term['pow']) == 1:
+            term_var = [term['var'],]
+        else:
+            term_var = term['var']
+        if isinstance(term['var'], (list, tuple)):
+            term_pow = term['pow']
+        else:
+            term_pow = [term['pow'],]
+
+        values = []
+        for var_idx, var in enumerate(term_var):
+            if isinstance(var, int):
+                if isinstance(term_pow[var_idx], (int, float)):
+                    val = y[var]**term_pow[var_idx]
+                elif isinstance(term_pow[var_idx], torch.nn.Sequential):
+                    val = call_ann_token(term_pow[var_idx], var, t, y)
+                else:
+                    val = term_pow[var_idx](y[var])
+            elif isinstance(var, (tuple, list)):
+                if isinstance(term_pow[var_idx], torch.nn.Sequential):
+                    val = call_ann_token(term_pow[var_idx], var, t, y)
+                elif isinstance(term_pow[var_idx], (int, float)):
+                    assert len(var) == 1, 'Incorrect number of arguments'
+                    val = y[list(var)]**term_pow[var_idx]
+                    if isinstance(val, np.ndarray): val = val[0]
+                else:
+                    val = term_pow[var_idx](*y[list(var)])
+                    if isinstance(val, torch.Tensor):
+                        val = val.item()
+
+            values.append(val)
+
+        return reduce(lambda x, z: x*z, values, k)
+
+
+class OdeintAdapter(object):
+    def __init__(self, method: str = 'Radau'):
+        self._solve_method = method
+        # TODO: implement hyperparameters setup, according to the problem specifics
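# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the patch. The reduction that
# ImplicitEquation performs, shown on u'' = -u: the state vector stacks the
# variable and its lower-order derivatives, as create_first_ord_eq chains them.
import numpy as np
from scipy.integrate import solve_ivp

def rhs(t, y):
    return np.array([y[1], -y[0]])   # d(u)/dt = u', d(u')/dt = -u

grid = np.linspace(0, 2 * np.pi, 100)
sol = solve_ivp(rhs, t_span=(grid[0], grid[-1]), y0=np.array([0., 1.]),
                t_eval=grid, method='Radau')  # same default method as OdeintAdapter
# sol.y[0] approximates sin(t)
# ---------------------------------------------------------------------------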
+    def solve_epde_system(self, system: Union[SoEq, dict], grids: list=None, boundary_conditions=None,
+                          mode='NN', data=None, *args, **kwargs):
+        if isinstance(system, SoEq):
+            system_interface = SystemSolverInterface(system_to_adapt=system)
+            system_solver_forms = system_interface.form(grids = grids, mode = mode)
+        elif isinstance(system, list):  # a pre-compiled list of solver-form dictionaries
+            system_solver_forms = system
+
+        if boundary_conditions is None:
+            op_gen = PregenBOperator(system=system,
+                                     system_of_equation_solver_form=[sf_labeled[1] for sf_labeled
+                                                                     in system_solver_forms])
+            op_gen.generate_default_bc(vals = data, grids = grids)
+            boundary_conditions = op_gen.conditions
+
+        return self.solve(equations = [sf_labeled[1] for sf_labeled in system_solver_forms], domain = grids[0],
+                          boundary_conditions = boundary_conditions, vars = system.vars_to_describe)
+        # Add condition parser and control function args parser
+
+    def solve(self, equations, domain: Union[Domain, np.ndarray],
+              boundary_conditions: List[BOPElement] = None, vars: List[str] = ['x',], *args, **kwargs):
+        if not isinstance(equations, list):
+            raise RuntimeError('Incorrect type of equations passed into odeint solver.')
+        self._implicit_equation = ImplicitEquation(equations, domain, vars)
+        if isinstance(domain, Domain):
+            grid = domain.build().detach().numpy().reshape(-1)
+        elif isinstance(domain, torch.Tensor):
+            grid = domain.detach().numpy().reshape(-1)
+        else:
+            grid = np.asarray(domain).reshape(-1)
+
+        initial_cond = self._implicit_equation.parse_cond(boundary_conditions)
+        solution = solve_ivp(fun = self._implicit_equation, t_span = (grid[0], grid[-1]), y0=initial_cond,
+                             t_eval = grid, method = self._solve_method)
+        if not solution.success:
+            warn(f'Numerical solution of the ODEs did not converge. The error message is {solution.message}')
+        return 0, solution.y.T
diff --git a/epde/integrate/solver_integration.py b/epde/integrate/solver_integration.py
new file mode 100644
index 0000000..d39dd29
--- /dev/null
+++ b/epde/integrate/solver_integration.py
@@ -0,0 +1,412 @@
+'''
+
+'''
+
+# !/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import os
+import numpy as np
+import torch
+
+from typing import Callable, Union, Dict, List
+from functools import singledispatchmethod, singledispatch
+
+from torch.nn import Sequential
+
+from epde.structure.main_structures import Equation, SoEq
+import epde.globals as global_var
+from epde.evaluators import CustomEvaluator, simple_function_evaluator
+
+from epde.integrate.interface import SystemSolverInterface
+from epde.integrate.bop import BOPElement, PregenBOperator
+
+from epde.supplementary import create_solution_net
+from epde.solver.data import Domain, Conditions
+from epde.solver.data import Equation as SolverEquation
+from epde.solver.model import Model
+from epde.solver.callbacks import cache, early_stopping, plot, adaptive_lambda
+from epde.solver.optimizers.optimizer import Optimizer
+from epde.solver.device import solver_device, check_device, device_type
+from epde.solver.models import mat_model
+
+'''
+Specification of baseline equation solver parameters. Can be separated into its own json file for
+better handling, i.e. manual setup.
+''' + +BASE_COMPILING_PARAMS = { + 'mode' : 'NN', + 'lambda_operator' : 1e1, + 'lambda_bound' : 1e4, + 'normalized_loss_stop' : False, + 'h' : 0.001, + 'inner_order' : '1', + 'boundary_order' : '2', + 'weak_form' : 'None', + 'tol' : 0.005, + 'derivative_points' : 3 + } + +ADAM_OPTIMIZER_PARAMS = { + 'lr' : 1e-5, + 'eps' : 1e-6 + } + +SGD_OPTIMIZER_PARAMS = { + } + +LBFGS_OPTIMIZER_PARAMS = { + 'lr' : 1e-2, + 'max_iter' : 10 + } + +PSO_OPTIMIZER_PARAMS = { + 'pop_size' : 30, + 'b' : 0.9, + 'c1' : 8e-2, + 'c2' : 5e-1, + 'lr' : 1e-3, + 'betas' : (0.99, 0.999), + 'c_decrease' : False, + 'variance' : 1, + 'epsilon' : 1e-8, + 'n_iter' : 2000 + } + +BASE_OPTIMIZER_PARAMS = { + 'optimizer' : 'Adam', # Alternatively, switch to PSO, if it proves to be effective. + 'gamma' : 'None', + 'decay_every' : 'None' + } + +OPTIMIZERS_MATCHED = { + 'Adam' : ADAM_OPTIMIZER_PARAMS, + 'LBFGS' : LBFGS_OPTIMIZER_PARAMS, + 'PSO' : PSO_OPTIMIZER_PARAMS, + 'SGD' : SGD_OPTIMIZER_PARAMS + } + +BASE_CACHE_PARAMS = { + 'cache_verbose' : True, + 'cache_model' : 'None', + 'model_randomize_parameter' : 0, + 'clear_cache' : False + } + +BASE_EARLY_STOPPING_PARAMS = { + 'eps' : 1e-7, + 'loss_window' : 100, + 'no_improvement_patience' : 1000, + 'patience' : 7, + 'abs_loss' : 1e-5, + 'normalized_loss' : False, + 'randomize_parameter' : 1e-5, + 'info_string_every' : 'None', + 'verbose' : False + } + +try: + plot_saving_directory = os.path.realpath(__file__) +except NameError: + plot_saving_directory = 'None' + +BASE_PLOTTER_PARAMS = { + 'save_every' : 1000, + 'print_every' : 500, + 'title' : 'None', + 'img_dir' : plot_saving_directory + } + + +BASE_TRAINING_PARAMS = { + 'epochs' : 3e4, # 1e5 + 'info_string_every' : 'None', #1e4, + 'mixed_precision' : False, + 'save_model' : False, + 'model_name' : 'None' + } + +def solver_formed_grid(training_grid=None, device = 'cpu'): + if training_grid is None: + keys, training_grid = global_var.grid_cache.get_all(mode = 'torch') + else: + keys, _ = global_var.grid_cache.get_all(mode = 'torch') + + assert len(keys) == training_grid[0].ndim, 'Mismatching dimensionalities' + + training_grid = np.array(training_grid).reshape((len(training_grid), -1)) + return torch.from_numpy(training_grid).T.to(device).float() + + +class SolverAdapter(object): + def __init__(self, net=None, use_cache: bool = True, device: str = 'cpu'): + self._device = device + self.set_net(net) + + self._compiling_params = dict() + self.set_compiling_params(**BASE_COMPILING_PARAMS) + + self._optimizer_params = dict() + self.set_optimizer_params(**BASE_OPTIMIZER_PARAMS) + + self._cache_params = dict() + self.set_cache_params(**BASE_CACHE_PARAMS) + + self._early_stopping_params = dict() + self.set_early_stopping_params(**BASE_EARLY_STOPPING_PARAMS) + + self._ploter_params = dict() + self.set_plotting_params(**BASE_PLOTTER_PARAMS) + + self._training_params = dict() + self.set_training_params(**BASE_TRAINING_PARAMS) + + self.use_cache = use_cache + + # def set_net(self, net, get_net_kwargs: dict): + # self.net = net if net is None else self.get_net(**get_net_kwargs) + + @property + def mode(self): + return self._compiling_params['mode'] + + def set_net(self, net: torch.nn.Sequential): + # if self.net is not None and + self.net = net + + @staticmethod + def get_net(equations, mode: str, domain: Domain, use_fourier = True, + fft_params: dict = {'L' : [4,], 'M' : [3,]}, device: str = 'cpu'): + if mode == 'mat': + return mat_model(domain, equations) + elif mode in ['autograd', 'NN']: + return 
create_solution_net(equations_num=equations.num, domain_dim=domain.dim, + use_fourier=use_fourier, fft_params=fft_params, device=device) + + + def set_compiling_params(self, mode: str = None, lambda_operator: float = None, + lambda_bound : float = None, normalized_loss_stop: bool = None, + h: float = None, inner_order: str = None, boundary_order: str = None, + weak_form: List[Callable] = None, tol: float = None, derivative_points: int = None): + compiling_params = {'mode' : mode, 'lambda_operator' : lambda_operator, 'lambda_bound' : lambda_bound, + 'normalized_loss_stop' : normalized_loss_stop, 'h' : h, 'inner_order' : inner_order, + 'boundary_order' : boundary_order, 'weak_form' : weak_form, 'tol' : tol, + 'derivative_points' : derivative_points} + + for param_key, param_vals in compiling_params.items(): + if param_vals is not None: + try: + if param_vals == 'None': + self._compiling_params[param_key] = None + else: + self._compiling_params[param_key] = param_vals + except KeyError: + print(f'Parameter {param_key} can not be passed into the solver.') + + def set_optimizer_params(self, optimizer: str = None, params: Dict[str, float] = None, + gamma: float = None, decay_every: int = None): + optim_params = {'optimizer' : optimizer, 'params' : params, 'gamma' : gamma, + 'decay_every' : decay_every} + + for param_key, param_vals in optim_params.items(): + if param_vals is not None: + try: + if param_vals == 'None': + self._optimizer_params[param_key] = None + else: + self._optimizer_params[param_key] = param_vals + if param_key == 'optimizer': + if param_vals not in ['Adam', 'SGD', 'PSO', 'LBFGS']: + raise ValueError(f'Unimplemented optimizer has been selected. Please, use {OPTIMIZERS_MATCHED.keys()}') + self._optimizer_params['params'] = OPTIMIZERS_MATCHED[param_vals] + except KeyError: + print(f'Parameter {param_key} can not be passed into the solver.') + + def set_cache_params(self, cache_verbose: bool = None, cache_model: Sequential = None, + model_randomize_parameter: Union[int, float] = None, clear_cache: bool = None): # use_cache: bool = None, + + cache_params = { 'cache_verbose' : cache_verbose, 'cache_model' : cache_model, # 'use_cache' : use_cache, + 'model_randomize_parameter' : model_randomize_parameter, 'clear_cache' : clear_cache} + + for param_key, param_vals in cache_params.items(): + if param_vals is not None: + try: + if param_vals == 'None': + self._cache_params[param_key] = None + else: + self._cache_params[param_key] = param_vals + except KeyError: + print(f'Parameter {param_key} can not be passed into the solver.') + + def set_early_stopping_params(self, eps: float = None, loss_window: int = None, no_improvement_patience: int = None, + patience: int = None, abs_loss: float = None, normalized_loss: bool = None, + randomize_parameter: float = None, info_string_every: int = None, verbose: bool = None): + early_stopping_params = {'eps' : eps, 'loss_window' : loss_window, 'no_improvement_patience' : no_improvement_patience, + 'patience' : patience, 'abs_loss' : abs_loss, 'normalized_loss' : normalized_loss, + 'randomize_parameter' : randomize_parameter, 'info_string_every' : info_string_every, + 'verbose' : verbose} + + for param_key, param_vals in early_stopping_params.items(): + if param_vals is not None: + try: + if param_vals == 'None': + self._early_stopping_params[param_key] = None + else: + self._early_stopping_params[param_key] = param_vals + except KeyError: + print(f'Parameter {param_key} can not be passed into the solver.') + + def set_plotting_params(self, 
save_every: int = None, print_every: int = None, title: str = None, + img_dir: str = None): + plotting_params = {'save_every' : save_every, 'print_every' : print_every, + 'title' : title, 'img_dir' : img_dir} + for param_key, param_vals in plotting_params.items(): + if param_vals is not None: + try: + if param_vals == 'None': + self._plotter_params[param_key] = None + else: + self._plotter_params[param_key] = param_vals + except KeyError: + print(f'Parameter {param_key} can not be passed into the solver.') + + def set_training_params(self, epochs: int = None, info_string_every: int = None, mixed_precision: bool = None, + save_model: bool = None, model_name: str = None): + training_params = {'epochs' : epochs, 'info_string_every' : info_string_every, + 'mixed_precision' : mixed_precision, 'save_model' : save_model, 'model_name' : model_name} + + for param_key, param_vals in training_params.items(): + if param_vals is not None: + try: + if param_vals == 'None': + self._training_params[param_key] = None + else: + self._training_params[param_key] = param_vals + except KeyError: + print(f'Parameter {param_key} can not be passed into the solver.') + + def change_parameter(self, parameter: str, value, param_dict_key: str = None): + setters = {'compiling_params' : (BASE_COMPILING_PARAMS, self.set_compiling_params), + 'optimizer_params' : (BASE_OPTIMIZER_PARAMS, self.set_optimizer_params), + 'cache_params' : (BASE_CACHE_PARAMS, self.set_cache_params), + 'early_stopping_params' : (BASE_EARLY_STOPPING_PARAMS, self.set_early_stopping_params), + 'plotting_params' : (BASE_PLOTTER_PARAMS, self.set_plotting_params), + 'training_params' : (BASE_TRAINING_PARAMS, self.set_training_params)} + + if value is None: + value = 'None' + + if param_dict_key is not None: # TODO: Add regular expressions + param_labeled = {parameter : value} + setters[param_dict_key][1](**param_labeled) + else: + for key, param_elem in setters.items(): + if parameter in param_elem[0].keys(): + param_labeled = {parameter : value} + param_elem[1](**param_labeled) + + @staticmethod + def create_domain(variables: List[str], grids : List[Union[np.ndarray, torch.Tensor]], + device: str = 'cpu') -> Domain: + assert len(variables) == len(grids), f'Number of passed variables {len(variables)} does not \ + match number of grids {len(grids)}.' + if isinstance(grids[0], np.ndarray): + assert len(variables) == grids[0].ndim, 'Grids have to be set as a N-dimensional np.ndarrays with dim \ + matching the domain dimensionality' + domain = Domain('uniform') + + for idx, var_name in enumerate(variables): + var_grid = grids[idx].to(device) if isinstance(grids[idx], torch.Tensor) else torch.tensor(grids[idx]).to(device) + var_grid = var_grid.unique().reshape(-1) + domain.variable(variable_name = var_name, variable_set = var_grid, + n_points = None) + + return domain + + def solve_epde_system(self, system: Union[SoEq, dict], grids: list=None, boundary_conditions=None, + mode='NN', data=None, use_cache: bool = False, use_fourier: bool = False, + fourier_params: dict = None, use_adaptive_lambdas: bool = False, + to_numpy: bool = False, *args, **kwargs): + solver_device(device = self._device) + + if isinstance(system, SoEq): + system_interface = SystemSolverInterface(system_to_adapt=system) + system_solver_forms = system_interface.form(grids = grids, mode = mode) + elif isinstance(system, dict): + system_solver_forms = system + else: + raise TypeError(f'Incorrect type of the equations passed into solver.
Expected dict or SoEq, got {type(system)}.') + + if boundary_conditions is None: + op_gen = PregenBOperator(system=system, + system_of_equation_solver_form=[sf_labeled[1] for sf_labeled + in system_solver_forms]) + op_gen.generate_default_bc(vals = data, grids = grids) + boundary_conditions = op_gen.conditions + + bconds_combined = Conditions() + for cond in boundary_conditions: + bconds_combined.operator(bnd = cond['bnd_loc'], operator = cond['bnd_op'], + value = cond['bnd_val']) + + if grids is None: + grid_var_keys, grids = global_var.grid_cache.get_all(mode = 'torch') + else: + grid_var_keys, _ = global_var.grid_cache.get_all(mode = 'torch') + domain = self.create_domain(grid_var_keys, grids, self._device) + + return self.solve(equations=[form[1] for form in system_solver_forms], domain = domain, + boundary_conditions = bconds_combined, mode = mode, use_cache = use_cache, + use_fourier = use_fourier, fourier_params = fourier_params, + use_adaptive_lambdas = use_adaptive_lambdas, to_numpy = to_numpy) + + def solve(self, equations: Union[List, SoEq, SolverEquation], domain: Domain, + boundary_conditions = None, mode = 'NN', use_cache: bool = False, + use_fourier: bool = False, fourier_params: dict = None, # epochs = 1e3, + use_adaptive_lambdas: bool = False, to_numpy = False, *args, **kwargs): + + if isinstance(equations, SolverEquation): + equations_prepared = equations + else: + equations_prepared = SolverEquation() + for form in equations: + equations_prepared.add(form) + if self.net is None: + self.net = self.get_net(equations_prepared, mode, domain, use_fourier, + fourier_params, device=self._device) + + + cb_early_stops = early_stopping.EarlyStopping(**self._early_stopping_params) + callbacks = [cb_early_stops,] + if use_cache: + callbacks.append(cache.Cache(**self._cache_params)) + + if use_adaptive_lambdas: + callbacks.append(adaptive_lambda.AdaptiveLambda()) + + optimizer = Optimizer(**self._optimizer_params) + + self.net.to(device = self._device) + + model = Model(net = self.net, domain = domain, equation = equations_prepared, + conditions = boundary_conditions) + + model.compile(**self._compiling_params) + loss = model.train(optimizer, callbacks=callbacks, **self._training_params) + + grid = domain.build(mode = self.mode) + + grid = check_device(grid) + + if mode in ['NN', 'autograd'] and to_numpy: + solution = self.net(grid).detach().cpu().numpy() + elif mode in ['NN', 'autograd'] and not to_numpy: + solution = self.net + elif mode == 'mat' and to_numpy: + solution = self.net.detach().cpu().numpy() + elif mode == 'mat' and not to_numpy: + solution = self.net + else: + raise ValueError('Incorrect mode.') + return loss, solution diff --git a/epde/interface/equation_translator.py b/epde/interface/equation_translator.py index d07a807..0128cac 100755 --- a/epde/interface/equation_translator.py +++ b/epde/interface/equation_translator.py @@ -22,11 +22,11 @@ def float_convertable(obj): return False @singledispatch -def translate_equation(text_form, pool): +def translate_equation(text_form, pool, all_vars): raise NotImplementedError(f'Equation shall be translated from {type(text_form)}') @translate_equation.register -def _(text_form : str, pool): +def _(text_form : str, pool, all_vars): parsed_text_form = parse_equation_str(text_form) term_list = [] weights = np.empty(len(parsed_text_form) - 1) @@ -34,7 +34,7 @@ def _(text_form : str, pool): for idx, term in enumerate(parsed_text_form): if (any([not float_convertable(elem) for elem in term]) and any([float_convertable(elem) for elem 
in term])): - factors = [parse_factor(factor, pool) for factor in term[1:]] + factors = [parse_factor(factor, pool, all_vars) for factor in term[1:]] if len(factors) > max_factors: max_factors = len(factors) term_list.append(Term(pool, passed_term=factors, collapse_powers=False)) @@ -42,7 +42,7 @@ def _(text_form : str, pool): elif float_convertable(term[0]) and len(term) == 1: weights[idx] = float(term[0]) elif all([not float_convertable(elem) for elem in term]): - factors = [parse_factor(factor, pool) for factor in term] + factors = [parse_factor(factor, pool, all_vars) for factor in term] if len(factors) > max_factors: max_factors = len(factors) term_list.append(Term(pool, passed_term=factors, collapse_powers=False)) @@ -65,7 +65,7 @@ def _(text_form : str, pool): return system @translate_equation.register -def _(text_form : dict, pool): +def _(text_form : dict, pool, all_vars): structure = {} for var_key, eq_text_form in text_form.items(): parsed_text_form = parse_equation_str(eq_text_form) @@ -75,7 +75,7 @@ def _(text_form : dict, pool): for idx, term in enumerate(parsed_text_form): if (any([not float_convertable(elem) for elem in term]) and any([float_convertable(elem) for elem in term])): - factors = [parse_factor(factor, pool) for factor in term[1:]] + factors = [parse_factor(factor, pool, all_vars) for factor in term[1:]] if len(factors) > max_factors: max_factors = len(factors) term_list.append(Term(pool, passed_term=factors, collapse_powers=False)) @@ -83,7 +83,7 @@ def _(text_form : dict, pool): elif float_convertable(term[0]) and len(term) == 1: weights[idx] = float(term[0]) elif all([not float_convertable(elem) for elem in term]): - factors = [parse_factor(factor, pool) for factor in term] + factors = [parse_factor(factor, pool, all_vars) for factor in term] if len(factors) > max_factors: max_factors = len(factors) term_list.append(Term(pool, passed_term=factors, collapse_powers=False)) @@ -128,15 +128,15 @@ def parse_term_str(term_form): pass -def parse_factor(factor_form, pool): # For the project: add grid trimming in the regions where the derivative values are zero - print(factor_form) +def parse_factor(factor_form, pool, all_vars): # For the project: add grid trimming in the regions where the derivative values are zero + # print(factor_form) label_str, params_str = tuple(factor_form.split('{')) if '}' not in params_str: raise ValueError('Missing brackets, denoting parameters part of factor text form.
Possible explanation: passing wrong argument') params_str = parse_params_str(params_str.replace('}', '')) - print(label_str, params_str) + # print(label_str, params_str) factor_family = [family for family in pool.families if label_str in family.tokens][0] - _, factor = factor_family.create(label=label_str, **params_str) + _, factor = factor_family.create(label=label_str, all_vars = all_vars, **params_str) factor.set_param(param = params_str['power'], name = 'power') return factor @@ -152,20 +152,67 @@ def parse_params_str(param_str): return params_parsed class CoeffLessEquation(): - def __init__(self, lp_terms : Union[list, tuple], rp_term : Union[list, tuple], pool): - self.lp_terms_translated = [Term(pool, passed_term = [parse_factor(factor, pool) for factor in term], - collapse_powers=False) for term in lp_terms] - self.rp_translated = Term(pool, passed_term = [parse_factor(factor, pool) for factor in rp_term], collapse_powers=False) - - self.lp_values = np.vstack(list(map(lambda x: x.evaluate(False).reshape(-1), self.lp_terms_translated))) - self.rp_value = self.rp_translated.evaluate(False).reshape(-1) - lr = LinearRegression() - lr.fit(self.lp_values.T, self.rp_value) - print(lr.coef_, lr.intercept_, type(lr.coef_)) - terms_aggregated = self.lp_terms_translated + [self.rp_translated,] - max_factors = max([len(term.structure) for term in terms_aggregated]) - self.equation = Equation(pool=pool, basic_structure=terms_aggregated, - terms_number=len(lp_terms) + 1, max_factors_in_term=max_factors) - self.equation.target_idx = len(terms_aggregated) - 1 - self.equation.weights_internal = np.append(lr.coef_, lr.intercept_) - self.equation.weights_final = np.append(lr.coef_, lr.intercept_) + def __init__(self, lp_terms : Union[list, tuple, dict], rp_term : Union[list, tuple, dict], pool, all_vars): + ''' + Equation with undefined coefficients: the weights of the terms are obtained by linear regression. + + Args: + lp_terms: text forms of the left-part terms; a list/tuple for a single equation, or a dict, + mapping variable names to their terms, for a system. + rp_term: text form of the right-part term, or a dict of such forms for a system. + pool: pool of the token families, used during factor parsing. + all_vars: names of the modeled variables. + ''' + if isinstance(lp_terms, dict): + if len(lp_terms.keys()) != len(rp_term.keys()): + raise KeyError(f'Number of left parts {lp_terms.keys()} mismatches right parts {rp_term.keys()}.') + structure = {} + for variable in rp_term.keys(): + lp_terms_translated = [Term(pool, passed_term = [parse_factor(factor, pool, all_vars) for factor in term], + collapse_powers=False) for term in lp_terms[variable]] + rp_translated = Term(pool, passed_term = [parse_factor(factor, pool, all_vars) for factor in rp_term[variable]], + collapse_powers=False) + + lp_values = np.vstack(list(map(lambda x: x.evaluate(False).reshape(-1), lp_terms_translated))) + rp_value = rp_translated.evaluate(False).reshape(-1) + lr = LinearRegression() + lr.fit(lp_values.T, rp_value) + # print(lr.coef_, lr.intercept_, type(lr.coef_)) + terms_aggregated = lp_terms_translated + [rp_translated,] + max_factors = max([len(term.structure) for term in terms_aggregated]) + equation = Equation(pool=pool, basic_structure=terms_aggregated, + metaparameters={'sparsity' : {'optimizable': True, 'value': 1.}, + 'terms_number' : {'optimizable': False, 'value': len(lp_terms[variable]) + 1}, + 'max_factors_in_term': {'optimizable': False, 'value': max_factors}}) + # terms_number=len(lp_terms) + 1, max_factors_in_term=max_factors) + equation.target_idx = len(terms_aggregated) - 1 + equation.weights_internal = np.append(lr.coef_, lr.intercept_) + equation.weights_final = np.append(lr.coef_, lr.intercept_) + structure[variable] = equation + self.system = SoEq(pool = pool, metaparameters={'sparsity': {'optimizable': True, 'value': 1.}, + 'terms_number': {'optimizable': False, 'value': 1}, # TODO: better terms number init +
'max_factors_in_term': {'optimizable': False, 'value': max_factors}}) + self.system.vals = Chromosome(structure, params={key: val for key, val in self.system.metaparameters.items() + if val['optimizable']}) + + + else: + self.lp_terms_translated = [Term(pool, passed_term = [parse_factor(factor, pool, all_vars) for factor in term], + collapse_powers=False) for term in lp_terms] + self.rp_translated = Term(pool, passed_term = [parse_factor(factor, pool, all_vars) for factor in rp_term], + collapse_powers=False) + + self.lp_values = np.vstack(list(map(lambda x: x.evaluate(False).reshape(-1), self.lp_terms_translated))) + self.rp_value = self.rp_translated.evaluate(False).reshape(-1) + lr = LinearRegression() + lr.fit(self.lp_values.T, self.rp_value) + # print(lr.coef_, lr.intercept_, type(lr.coef_)) + terms_aggregated = self.lp_terms_translated + [self.rp_translated,] + max_factors = max([len(term.structure) for term in terms_aggregated]) + self.equation = Equation(pool=pool, basic_structure=terms_aggregated, + metaparameters={'sparsity' : {'optimizable': True, 'value': 1.}, + 'terms_number' : {'optimizable': False, 'value': len(lp_terms) + 1}, + 'max_factors_in_term': {'optimizable': False, 'value': max_factors}}) + # terms_number=len(lp_terms) + 1, max_factors_in_term=max_factors) + self.equation.target_idx = len(terms_aggregated) - 1 + self.equation.weights_internal = np.append(lr.coef_, lr.intercept_) + self.equation.weights_final = np.append(lr.coef_, lr.intercept_) + self.system = SoEq(pool = pool, metaparameters={'sparsity': {'optimizable': True, 'value': 1.}, + 'terms_number': {'optimizable': False, 'value': len(lp_terms) + 1}, # TODO: better terms number init + 'max_factors_in_term': {'optimizable': False, 'value': max_factors}}) + self.system.vals = Chromosome({'u': self.equation}, + params={key: val for key, val in self.system.metaparameters.items() + if val['optimizable']}) \ No newline at end of file diff --git a/epde/interface/interface.py b/epde/interface/interface.py index 852cb2d..3436d95 100644 --- a/epde/interface/interface.py +++ b/epde/interface/interface.py @@ -1,25 +1,36 @@ -# !/usr/bin/env python3 -# -*- coding: utf-8 -*- """ -Created on Tue Jul 6 15:55:12 2021 -@author: mike_ubuntu +Interface objects for EPDE framework + +Contains: +--------- + +**InputDataEntry** class, containing logic for preparing the input data for the equation search, +such as initialization of necessary token families and derivatives calculation. + +**EpdeSearch** class for main interactions between the user and the framework.
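+ +A minimal usage sketch (assuming ``t`` is a np.ndarray grid and ``u`` is the matching array of observations): + +    search = EpdeSearch(use_solver=False, coordinate_tensors=[t,]) +    search.fit(data=[u,], variable_names=['u',], max_deriv_order=2) +    search.equations(only_print=True)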
+ """ import pickle import numpy as np -from typing import Union, Callable +import torch + +from copy import deepcopy +from typing import Union, Callable, List, Tuple from collections import OrderedDict +from functools import reduce, singledispatchmethod import epde.globals as global_var from epde.optimizers.builder import StrategyBuilder +from epde.optimizers.builder import OptimizationPatternDirector from epde.optimizers.moeadd.moeadd import * from epde.optimizers.moeadd.supplementary import * from epde.optimizers.moeadd.strategy import MOEADDDirector from epde.optimizers.moeadd.strategy_elems import MOEADDSectorProcesser -from epde.optimizers.single_criterion.optimizer import EvolutionaryStrategy, SimpleOptimizer +from epde.optimizers.single_criterion.optimizer import EvolutionaryStrategy, SimpleOptimizer, Population from epde.optimizers.single_criterion.strategy import BaselineDirector from epde.optimizers.single_criterion.supplementary import simple_sorting @@ -40,7 +51,7 @@ from epde.interface.token_family import TFPool, TokenFamily from epde.interface.type_checks import * from epde.interface.prepared_tokens import PreparedTokens, CustomTokens, DataPolynomials -from epde.interface.solver_integration import BoundaryConditions, SolverAdapter, SystemSolverInterface +from epde.integrate import BoundaryConditions, SolverAdapter, SystemSolverInterface class InputDataEntry(object): """ @@ -54,12 +65,20 @@ class InputDataEntry(object): derivatives (`np.ndarray`): values of derivatives deriv_properties (`dict`): settings of derivatives """ - def __init__(self, var_name: str, data_tensor: np.ndarray): + def __init__(self, var_name: str, var_idx: int, data_tensor: Union[List[np.ndarray], np.ndarray]): self.var_name = var_name - check_nparray(data_tensor) + self.var_idx = var_idx + if isinstance(data_tensor, np.ndarray): + check_nparray(data_tensor) + self.ndim = data_tensor.ndim + elif isinstance(data_tensor, list): + [check_nparray(tensor) for tensor in data_tensor] + assert all([data_tensor[0].ndim == tensor.ndim for tensor in data_tensor]), 'Mismatching dimensionalities of data tensors.' 
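+        # Multi-sample mode: a list of data tensors shares a single entry, so the dimensionality is taken from the first sample.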
+ self.ndim = data_tensor[0].ndim self.data_tensor = data_tensor - def set_derivatives(self, preprocesser: PreprocessingPipe, deriv_tensors=None, + + def set_derivatives(self, preprocesser: PreprocessingPipe, deriv_tensors: Union[list, np.ndarray] = None, max_order: Union[list, tuple, int] = 1, grid: list = []): """ Method for setting derivatives or calculating them from data @@ -73,36 +92,63 @@ def set_derivatives(self, preprocesser: PreprocessingPipe, deriv_tensors=None, Returns: None """ - deriv_names, deriv_orders = define_derivatives(self.var_name, dimensionality=self.data_tensor.ndim, + deriv_names, deriv_orders = define_derivatives(self.var_name, dimensionality=self.ndim, max_order=max_order) self.names = deriv_names self.d_orders = deriv_orders - if deriv_tensors is None: + if deriv_tensors is None and isinstance(self.data_tensor, np.ndarray): self.data_tensor, self.derivatives = preprocesser.run(self.data_tensor, grid=grid, max_order=max_order) self.deriv_properties = {'max order': max_order, 'dimensionality': self.data_tensor.ndim} + elif deriv_tensors is None and isinstance(self.data_tensor, list): + if isinstance(grid[0], np.ndarray): + raise ValueError('A single set of grids passed for multiple samples mode.') + data_tensors, derivatives = [], [] + for samp_idx, sample in enumerate(self.data_tensor): + processed_data, derivs = preprocesser.run(sample, grid=grid[samp_idx], + max_order=max_order) + data_tensors.append(processed_data) + derivatives.append(derivs) + self.data_tensor = np.concatenate(data_tensors, axis = 0) # TODO: stack data_tensors with the time axis in the correct way + self.derivatives = np.concatenate(derivatives, axis=0) # TODO: check the correctness of the concatenation + self.deriv_properties = {'max order': max_order, + 'dimensionality': self.data_tensor.ndim} + + elif deriv_tensors is not None and isinstance(self.data_tensor, list): + self.data_tensor = np.concatenate(self.data_tensor, axis = 0) + + print(f'Concatenating arrays of len {len(deriv_tensors)}') + self.derivatives = np.concatenate(deriv_tensors, axis = 0) + self.deriv_properties = {'max order': max_order, + 'dimensionality': self.data_tensor.ndim} else: self.derivatives = deriv_tensors self.deriv_properties = {'max order': max_order, 'dimensionality': self.data_tensor.ndim} - def use_global_cache(self): + def use_global_cache(self): # , var_idx: int, deriv_codes: list """ Method for adding calculated derivatives to the cache """ - derivs_stacked = prepare_var_tensor(self.data_tensor, self.derivatives, time_axis=global_var.time_axis) + var_idx = self.var_idx + deriv_codes = self.d_orders + derivs_stacked = prepare_var_tensor(self.data_tensor, self.derivatives, + time_axis=global_var.time_axis) + deriv_codes = [(var_idx, code) for code in deriv_codes] try: - upload_simple_tokens(self.names, global_var.tensor_cache, derivs_stacked) - upload_simple_tokens([self.var_name,], global_var.tensor_cache, [self.data_tensor,]) + upload_simple_tokens(self.names, global_var.tensor_cache, derivs_stacked, + deriv_codes=deriv_codes) + upload_simple_tokens([self.var_name,], global_var.tensor_cache, [self.data_tensor,], + deriv_codes=[(var_idx, [None,]),]) upload_simple_tokens([self.var_name,], global_var.initial_data_cache, [self.data_tensor,]) except AttributeError: raise NameError('Cache has not been declared before tensor addition.') - + print(f'Size of linked labels is {len(global_var.tensor_cache._deriv_codes)}') global_var.tensor_cache.use_structural() @staticmethod @@ -134,14 +180,13 @@ def create_derivs_family(self,
max_deriv_power: int = 1): self._derivs_family = TokenFamily(token_type=f'deriv of {self.var_name}', variable = self.var_name, family_of_derivs=True) - self._derivs_family.set_latex_form_constructor(self.latex_form) self._derivs_family.set_status(demands_equation=True, unique_specific_token=False, - unique_token_type=False, s_and_d_merged=False, - meaningful=True) + unique_token_type=False, s_and_d_merged=False, + meaningful=True) self._derivs_family.set_params(self.names, OrderedDict([('power', (1, max_deriv_power))]), {'power': 0}, self.d_orders) - self._derivs_family.set_evaluator(simple_function_evaluator, []) + self._derivs_family.set_evaluator(simple_function_evaluator) def create_polynomial_family(self, max_power): polynomials = DataPolynomials(self.var_name, max_power = max_power) @@ -150,10 +195,18 @@ def create_polynomial_family(self, max_power): def get_families(self): return [self._polynomial_family, self._derivs_family] + def matched_derivs(self, max_order = 1): + derivs_stacked = prepare_var_tensor(self.data_tensor, self.derivatives, + time_axis=global_var.time_axis) + # print(f'Creating matched derivs: {[[self.var_idx, key, len(key) <= max_order] for idx, + # key in enumerate(self.d_orders)]}') + # print(f'From {self.d_orders}') + return [[self.var_idx, key, derivs_stacked[idx, ...]] for idx, key in enumerate(self.d_orders) + if len(key) <= max_order] + def simple_selector(sorted_neighbors, number_of_neighbors=4): return sorted_neighbors[:number_of_neighbors] - class EpdeSearch(object): """ Initialization of the epde search object. Here, the user can declare the properties of the @@ -181,7 +234,8 @@ def __init__(self, multiobjective_mode: bool = True, use_default_strategy: bool use_solver: bool = False, dimensionality: int = 1, verbose_params: dict = {'show_iter_idx' : True}, coordinate_tensors=None, memory_for_cache=5, prune_domain: bool = False, pivotal_tensor_label=None, pruner=None, threshold: float = 1e-2, - division_fractions=3, rectangular: bool = True, params_filename: str = None): + division_fractions=3, rectangular: bool = True, + params_filename: str = None, device: str = 'cpu'): """ Args: use_default_strategy (`bool`): optional @@ -228,6 +282,7 @@ def __init__(self, multiobjective_mode: bool = True, use_default_strategy: bool rectangular(`bool`): optional A line of subdomains along an axis can be removed if all values inside them are identical to zero. """ + self._device = device self.multiobjective_mode = multiobjective_mode global_var.set_time_axis(time_axis) global_var.init_verbose(**verbose_params) @@ -241,16 +296,12 @@ def __init__(self, multiobjective_mode: bool = True, use_default_strategy: bool threshold=threshold, division_fractions=division_fractions, rectangular=rectangular) - if multiobjective_mode: - mode_key = 'multi objective' - else: - mode_key = 'single objective' - - EvolutionaryParams.reset() - evo_param = EvolutionaryParams(parameter_file = params_filename, mode = mode_key) + self._mode_info = {'criteria': 'multi objective' if multiobjective_mode else 'single objective', + 'solver_fitness': use_solver} - if use_solver: - global_var.dimensionality = dimensionality + # Here we initialize a singleton object with evolutionary params. It is used in operators' initialization.
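+        # The reset is presumably needed so that repeated EpdeSearch initializations do not inherit stale parameter values.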
+ EvolutionaryParams.reset() + evo_param = EvolutionaryParams(parameter_file = params_filename, mode = self._mode_info['criteria']) if director is not None and not use_default_strategy: self.director = director @@ -262,7 +313,7 @@ def __init__(self, multiobjective_mode: bool = True, use_default_strategy: bool self.director = BaselineDirector() builder = StrategyBuilder(EvolutionaryStrategy) self.director.builder = builder - self.director.use_baseline(params=director_params) + self.director.use_baseline(use_solver=self._mode_info['solver_fitness'], params=director_params) else: raise NotImplementedError('Wrong arguments passed during the epde search initialization') @@ -386,18 +437,21 @@ def domain_pruning(self, pivotal_tensor_label = None, pruner = None, threshold : float = 1e-5, division_fractions = 3, rectangular : bool = True): """ - Method for select only subdomains with variable dinamice + Method for selecting only subdomains with variable dynamics. Args: - pivotal_tensor_label (`np.ndarray`): pattern that guides the domain pruning - will be cutting areas, where values of the `pivotal_tensor` are closed to zero - pruner (`DomainPruner`): object for selecting domain region + pivotal_tensor_label (`np.ndarray`): + Pattern that guides the domain pruning: areas, where values of the + `pivotal_tensor` are close to zero, will be cut out. + pruner (`DomainPruner`): + Custom object for selecting domain region by pruning out areas with no dynamics. threshold (`float`): optional, default - 1e-5 - the boundary at which values are considered zero + The boundary at which values are considered zero. division_fractions (`int`): optional, default - 3 - number of fraction for each axis (if this is integer than all axis are dividing by same fractions) + Number of fractions for each axis (if an integer is passed, all axes are divided into the same number of fractions). rectangular (`bool`): default - True - flag indecating that area is rectangle + Flag indicating that the area is rectangular. + Returns: None """ @@ -422,18 +476,20 @@ def domain_pruning(self, pivotal_tensor_label = None, pruner = None, if global_var.grid_cache is not None: global_var.grid_cache.prune_tensors(self.pruner) - def create_caches(self, coordinate_tensors, memory_for_cache): + def _create_caches(self, coordinate_tensors, memory_for_cache): """ - Creating caches for tensor keeping - + Creating caches for keeping tensors during EPDE search. + Args: - coordinate_tensors (`np.ndarray|list`): grid values - memory_for_cache (`int`): allowed amount of memory for data storage - + coordinate_tensors (`np.ndarray|list`): + Grid values, passed as a single `np.ndarray` or a list of `np.ndarray`'s. + memory_for_cache (`int`): + Allowed amount of memory for data storage. + Returns: None """ - global_var.init_caches(set_grids=True) + global_var.init_caches(set_grids=True, device=self._device) example = coordinate_tensors if isinstance(coordinate_tensors, np.ndarray) else coordinate_tensors[0] self.set_memory_properties(example_tensor=example, mem_for_cache_frac=memory_for_cache) upload_grids(coordinate_tensors, global_var.initial_data_cache) @@ -441,81 +497,116 @@ def create_caches(self, coordinate_tensors, memory_for_cache): def set_boundaries(self, boundary_width: Union[int, list]): """ - Setting the number of unaccountable elements at the edges into cache with saved grid + Setting the number of unaccountable elements at the edges into cache with saved grid.
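+ +        For example, ``search.set_boundaries(10)`` excludes 10 nodes at each edge of the domain +        (a usage sketch; the list form presumably sets the width for each axis separately).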
""" global_var.grid_cache.set_boundaries(boundary_width=boundary_width) - def upload_g_func(self, function_form: Callable = None): + def _upload_g_func(self, function_form: Union[Callable, np.ndarray, list] = None): """ - Loading testing function connected to the weak derivative notion + Loading testing function connected to the weak derivative notion. Args: - function_form (`callable`): test function, default using inverse polynomial with max in the domain center + function_form (`callable`, or `np.ndarray`, or `list[np.ndarray]`) + Test function, default using inverse polynomial with max in the domain center. Returns: - None + None """ - try: - decorator = BoundaryExclusion(boundary_width=global_var.grid_cache.boundary_width) - if function_form is None: - def baseline_exp_function(grids): - def uniformize(data): - temp = -(data - np.mean(data))**2 - if np.min(temp) == np.max(temp): - return np.ones_like(temp) - else: - return (temp - np.min(temp)) / (np.max(temp) - np.min(temp)) - - exponent_partial = np.array([uniformize(grid) for grid in grids]) - exponent = np.multiply.reduce(exponent_partial, axis=0) - return exponent - - global_var.grid_cache.g_func = decorator(baseline_exp_function) - else: - global_var.grid_cache.g_func = decorator(function_form) + if isinstance(function_form, (np.ndarray, list)): + global_var.grid_cache.g_func = function_form + else: + try: + decorator = BoundaryExclusion(boundary_width=global_var.grid_cache.boundary_width) + if function_form is None: + def baseline_exp_function(grids): + def uniformize(data): + temp = -(data - np.mean(data))**2 + if np.min(temp) == np.max(temp): + return np.ones_like(temp) + else: + return (temp - np.min(temp)) / (np.max(temp) - np.min(temp)) + + exponent_partial = np.array([uniformize(grid) for grid in grids]) + exponent = np.multiply.reduce(exponent_partial, axis=0) + return exponent + + global_var.grid_cache.g_func = decorator(baseline_exp_function) + else: + global_var.grid_cache.g_func = decorator(function_form) - except NameError: - raise NameError('Cache for grids has not been initilized yet!') + except NameError: + raise NameError('Cache for grids has not been initilized yet!') def set_domain_properties(self, coordinate_tensors, memory_for_cache, boundary_width: Union[int, list], function_form: Callable = None, prune_domain: bool = False, pivotal_tensor_label=None, pruner=None, threshold: float = 1e-5, division_fractions: int = 3, rectangular: bool = True): """ - Setting properties for processing considered domain, such as removing areas with no dynamics, setting bounderes and uploading test function + Setting properties for processing considered domain, such as removing areas with no dynamics, + and setting bounderes. Can be used for uploading test function. 
- Args: - coordinate_tensor (`np.ndarray|list`): tensor with grid values - memory_for_cache (`int`): allowed amount of memory for data storage - boundary_width (`int|list`): the number of unaccountable elements at the edges - function_form (`callable`): optional, default - None - testing function connected to the weak derivative notion - prune_domain (`bool`): flag about using areas cropping, default - False - pivotal_tensor_label (`np.ndarray`): pattern that guides the domain pruning, default - None - pruner (`DomainPruner`): object for selecting domain region, default - None - threshold (`float`): optional, default - 1e-5 - the boundary at which values are considered zero - division_fractions (`int`): optional, default - 3 - number of fraction for each axis (if this is integer than all axis are dividing by same fractions) - rectangular (`bool`): default - True - flag indecating that area is rectangle + Parameters + ---------- + coordinate_tensors : list|np.ndarrays, optional + Values of the coordinates on the grid nodes with studied functions values. In case of 1D-problem, + that will be ``numpy.array``, while the parameter for higher dimensionality problems can be set from + ``numpy.meshgrid`` function. + memory_for_cache : int + Allowed amount of memory (in percentage) for data storage. + boundary_width : int|list + The number of unaccountable elements at the edges of the domain. + function_form : callable, optional + Testing function connected to the weak derivative notion, the default value is None, which + corresponds to the product of normalized inverse square functions of the coordinates, + centered at the middle of the domain. + prune_domain : bool + Flag, enabling area cropping by removing subdomains with constant values, default - False. + pivotal_tensor_label : np.ndarray + Pattern that guides the domain pruning, the default is None. + pruner : DomainPruner + Object for selecting domain region, the default is None. + threshold : float, optional + The boundary at which values are considered zero, the default is 1e-5. + division_fractions : int, optional + Number of fractions for each axis (if an integer is passed, all axes are divided into the + same number of fractions), the default is 3. + rectangular : bool, optional + Flag indicating that the cropped subdomains are rectangular, default - True. - Returns: - None + Returns + ------- + None. """ - self.create_caches(coordinate_tensors=coordinate_tensors, memory_for_cache=memory_for_cache) + self._create_caches(coordinate_tensors=coordinate_tensors, memory_for_cache=memory_for_cache) if prune_domain: self.domain_pruning(pivotal_tensor_label, pruner, threshold, division_fractions, rectangular) self.set_boundaries(boundary_width) - self.upload_g_func(function_form) + self._upload_g_func(function_form) def set_preprocessor(self, preprocessor_pipeline: PreprocessingPipe = None, - default_preprocessor_type: str = 'poly', - preprocessor_kwargs: dict = {}): - """ - - """ + default_preprocessor_type: str = 'poly', preprocessor_kwargs: dict = {}): + ''' + Specification of preprocessor, devoted to smoothing the raw input data and + calculating the derivatives. + + Parameters + ---------- + preprocessor_pipeline : PreprocessingPipe, optional + Pipeline of operators, aimed at preparing all necessary data for equation discovery.
+ default_preprocessor_type : str, optional + Key for selection of pre-defined preprocessors: **'poly'** matches Savitzky-Golay filtering, 'ANN' is for + neural network data approximation and further finite-difference differentiation, 'spectral' for + spectral differentiation. The default is 'poly'. + preprocessor_kwargs : dict, optional + Keyword arguments for preprocessor setup and operation. The default is an empty dictionary, corresponding to + all default parameters of the preprocessors. + + Returns + ------- + None. + + ''' if preprocessor_pipeline is None: setup = PreprocessorSetup() builder = ConcretePrepBuilder() @@ -527,8 +618,10 @@ def set_preprocessor(self, preprocessor_pipeline: PreprocessingPipe = None, setup.build_poly_diff_preprocessing(**preprocessor_kwargs) elif default_preprocessor_type == 'spectral': setup.build_spectral_preprocessing(**preprocessor_kwargs) + elif default_preprocessor_type == 'FD': + setup.build_FD_preprocessing(**preprocessor_kwargs) else: - raise NotImplementedError('Incorrect default preprocessor type. Only ANN or poly are allowed.') + raise NotImplementedError('Incorrect default preprocessor type. Only ANN, poly, spectral or FD are allowed.') preprocessor_pipeline = setup.builder.prep_pipeline if 'max_order' not in preprocessor_pipeline.deriv_calculator_kwargs.keys(): @@ -539,17 +632,29 @@ def set_preprocessor(self, preprocessor_pipeline: PreprocessingPipe = None, def create_pool(self, data: Union[np.ndarray, list, tuple], variable_names=['u',], derivs=None, max_deriv_order=1, additional_tokens=[], - data_fun_pow: int = 1, deriv_fun_pow: int = 1): + data_fun_pow: int = 1, deriv_fun_pow: int = 1, grid: list = None, + data_nn: torch.nn.Sequential = None, fourier_layers: bool = True, + fourier_params: dict = {'L' : [4,], 'M' : [3,]}): + ''' + Create a pool of tokens representing elementary functions that can be included in equations.
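+        The resulting pool is stored in ``self.pool`` and is reused by ``fit``, if the pool parameters of the new call match.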
+ + Args: + data : np.ndarray | list of np.ndarrays | tuple of np.ndarrays + Values of the modeled variables, one tensor per variable from ``variable_names``. + + ''' + grid = grid if grid is not None else global_var.grid_cache.get_all()[1] + self.pool_params = {'variable_names' : variable_names, 'max_deriv_order' : max_deriv_order, 'additional_tokens' : [family.token_family.ftype for family in additional_tokens]} - assert (isinstance(derivs, list) and isinstance(derivs[0], np.ndarray)) or derivs is None + # assert (isinstance(derivs, list) and isinstance(derivs[0], np.ndarray)) or derivs is None + # TODO: add better checks if isinstance(data, np.ndarray): data = [data,] if derivs is None: if len(data) != len(variable_names): - print(len(data), len(variable_names)) - raise ValueError('Mismatching lengths of data tensors and the names of the variables') + msg = f'Mismatching numbers of data tensors {len(data)} and variable names {len(variable_names)}' + raise ValueError(msg) else: if not (len(data) == len(variable_names) == len(derivs)): raise ValueError('Mismatching lengths of data tensors, names of the variables and passed derivatives') @@ -558,22 +663,38 @@ def create_pool(self, data: Union[np.ndarray, list, tuple], variable_names=['u', self.set_preprocessor() data_tokens = [] - + if self._mode_info['solver_fitness']: + base_derivs = [] + for data_elem_idx, data_tensor in enumerate(data): - assert isinstance(data_tensor, np.ndarray), 'Input data must be in format of numpy ndarrays or iterable (list or tuple) of numpy arrays' - entry = InputDataEntry(var_name=variable_names[data_elem_idx], + # TODO: add more relevant checks + # assert isinstance(data_tensor, np.ndarray), 'Input data must be in format of numpy ndarrays or iterable (list or tuple) of numpy arrays' + entry = InputDataEntry(var_name=variable_names[data_elem_idx], var_idx=data_elem_idx, data_tensor=data_tensor) derivs_tensor = derivs[data_elem_idx] if derivs is not None else None entry.set_derivatives(preprocesser=self.preprocessor_pipeline, deriv_tensors=derivs_tensor, - grid=global_var.grid_cache.get_all()[1], max_order=max_deriv_order) + grid=grid, max_order=max_deriv_order) entry.use_global_cache() - self.set_derivatives(variable=variable_names[data_elem_idx], deriv=entry.derivatives) + self.save_derivatives(variable=variable_names[data_elem_idx], deriv=entry.derivatives) entry.create_derivs_family(max_deriv_power=deriv_fun_pow) entry.create_polynomial_family(max_power=data_fun_pow) - + if self._mode_info['solver_fitness']: + base_derivs.extend(entry.matched_derivs(max_order = 2)) # TODO: add setup of Sobolev learning order + data_tokens.extend(entry.get_families()) + if self._mode_info['solver_fitness']: + if data_nn is not None: + print('Using pre-trained ANN') + global_var.reset_data_repr_nn(data = data, derivs = base_derivs, train = False, + grids = grid, predefined_ann = data_nn, device = self._device) + else: + epochs_max = 1e5 + global_var.reset_data_repr_nn(data = data, derivs = base_derivs, epochs_max=epochs_max, + grids = grid, predefined_ann = None, device = self._device, + use_fourier = fourier_layers, fourier_params = fourier_params) + if isinstance(additional_tokens, list): if not all([isinstance(tf, (TokenFamily, PreparedTokens)) for tf in additional_tokens]): raise TypeError(f'Incorrect type of additional tokens: expected list or TokenFamily/Prepared_tokens - obj, instead got list of {type(additional_tokens[0])}') @@ -587,7 +708,23 @@ def create_pool(self, data: Union[np.ndarray, list, tuple], variable_names=['u', print(f'The cardinality of defined token pool is
{self.pool.families_cardinality()}') print(f'Among them, the pool contains {self.pool.families_cardinality(meaningful_only=True)} meaningful families') - def set_derivatives(self, variable, deriv): + def save_derivatives(self, variable: str, deriv: np.ndarray): + ''' + Pass the derivatives of a variable as a np.ndarray. + + Parameters + ---------- + variable : str + Key for the variable to have the derivatives set. + deriv : np.ndarray + Arrays of derivatives. Have to be shaped as (n, m), where n is the number of passed derivatives + (for example, when you differentiate the dataset once for the first axis, and up to the second order for + the second, and you have no mixed derivatives, *n = 3*), and m is the number of data points in the domain. + + Returns + ------- + None. + ''' try: self._derivatives except AttributeError: @@ -602,134 +739,119 @@ def saved_derivaties(self): print('Trying to get derivatives before their calculation. Call EpdeSearch.create_pool() to calculate derivatives') return None - def fit(self, data: Union[np.ndarray, list, tuple], equation_terms_max_number=6, + def fit(self, data: Union[np.ndarray, list, tuple] = None, equation_terms_max_number=6, equation_factors_max_number=1, variable_names=['u',], eq_sparsity_interval=(1e-4, 2.5), - derivs=None, max_deriv_order=1, additional_tokens=[], data_fun_pow: int = 1): + derivs=None, max_deriv_order=1, additional_tokens=[], data_fun_pow: int = 1, deriv_fun_pow: int = 1, + optimizer: Union[SimpleOptimizer, MOEADDOptimizer] = None, pool: TFPool = None, + population: Union[ParetoLevels, Population] = None, data_nn = None, + fourier_layers: bool = True, fourier_params: dict = {'L' : [4,], 'M' : [3,]}): """ Fit epde search algorithm to obtain differential equations, describing passed data. - Args: - data (`np.ndarray|list|tuple`): Values of modeled variables. If the variable is single (i.e. deriving a single equation), - it can be passed as the numpy.ndarray or as the list/tuple with a single element; - multiple variables are not supported yet, use older interfaces. - time_axis (`int`): optional, default - 0 - Index of the axis in the data, representing time. - equation_terms_max_number (`int`):optional, default - 6 - The maximum number of terms, present in the derived equations. - equation_factors_max_number (`int`): optional, default - 1 - The maximum number of factors (token functions; real-valued coefficients are not counted here), - present in terms of the equaton. - variable_names (`list|str`): optional, default - ``['u',]``, representinga single variable *u*. - Names of the independent variables, passed into search mechanism. Length of the list must correspond - to the number of np.ndarrays, sent with in ``data`` parameter. In case of system of differential equation discovery, - all variables shall be named here. - eq_sparsity_interval (`tuple`): optional, default - ``(1e-4, 2.5)``. - The left and right boundaries of interval with sparse regression values. Influence the number of - terms in the equation. - derivs (`list`): list of lists of np.ndarrays, optional, default - None - Pre-computed values of derivatives. If ``None`` is passed, the derivatives are calculated in the - method. Recommended to use, if the computations of derivatives take too long. For further information - about using data, prepared in advance, check ``epde.preprocessing.derivatives.preprocess_derivatives`` function - max_deriv_order (`int|list|tuple`): optional, default - 1 - Highest order of calculated derivatives.
- additional_tokens (`list of TokenFamily or Prepared_tokens`): optional, defult - None - Additional tokens, that would be used to construct the equations among the main variables and their - derivatives. Objects of this list must be of type ``epde.interface.token_family.TokenFamily`` or - of ``epde.interface.prepared_tokens.Prepared_tokens`` subclasses types. - coordinate_tensors (`list|np.ndarrays`): optional, default - None - Values of the coordinates on the grid nodes with studied functions values. In case of 1D-problem, - that will be ``numpy.array``, while the parameter for higher dimensionality problems can be set from - ``numpy.meshgrid`` function. With None, the tensors will be created as ranges with step of 1 between - nodes. - field_smooth (`bool`): optional, default - False - Parameter, if the input variable fields shall be smoothed to avoid the errors. If the data is - assumed to be noiseless, shall be set to False, otherwise - True. - memory_for_cache (`int|float`): optional, default - 5 - Limit for the cache (in fraction of the memory) for precomputed tensor values to be stored: - if int, will be considered as the percentage of the entire memory, and if float, - then as a fraction of memory. - data_fun_pow (`int`): optional, default - 1 - Maximum power of token - + Parameters + ---------- + data : np.ndarray | list | tuple, optional + Values of modeled variables. If the variable is single (i.e. deriving a single equation), + it can be passed as the numpy.ndarray or as the list/tuple with a single element; + multiple variables are not supported yet, use older interfaces. Default value is None, but it + shall be used only for retraining, when the pool argument is passed. + equation_terms_max_number : int, optional + The maximum number of terms, present in the derived equations, the default is 6. + equation_factors_max_number : int, optional + The maximum number of factors (token functions; real-valued coefficients are not counted here), + present in terms of the equation, the default is 1. + variable_names : list | str, optional + Names of the independent variables, passed into search mechanism. Length of the list must correspond + to the number of np.ndarrays, sent within the ``data`` parameter. In case of system of differential equation discovery, + all variables shall be named here, default - ``['u',]``, representing a single variable *u*. + eq_sparsity_interval : tuple, optional + The left and right boundaries of interval with sparse regression values. Indirectly influences the + number of active terms in the equation, the default is ``(1e-4, 2.5)``. + derivs : list or list of lists of np.ndarrays, optional + Pre-computed values of derivatives. If ``None`` is passed, the derivatives are calculated in the + method. Recommended to use, if the computations of derivatives take too long. For further information + about using data, prepared in advance, check ``epde.preprocessing.derivatives.preprocess_derivatives`` + function, default - None. + max_deriv_order : int | list | tuple, optional + Highest order of calculated derivatives, the default is 1. + additional_tokens : list of TokenFamily or Prepared_tokens, optional + Additional tokens, that would be used to construct the equations among the main variables and their + derivatives. Objects of this list must be of type ``epde.interface.token_family.TokenFamily`` or + of ``epde.interface.prepared_tokens.Prepared_tokens`` subclasses types. The default is None.
+ field_smooth : bool, optional + Parameter defining whether the input variable fields shall be smoothed to avoid errors. If the data is + assumed to be noiseless, shall be set to False, otherwise - True, the default - False. + memory_for_cache : int | float, optional + Limit for the cache (in fraction of the memory) for precomputed tensor values to be stored: + if int, will be considered as the percentage of the entire memory, and if float, + then as a fraction of memory, the default is 5. + data_fun_pow : int, optional + Maximum power of token, the default is 1. + optimizer : SimpleOptimizer | MOEADDOptimizer, optional + Pre-defined optimizer, that will be used during evolution. Shall correspond with the mode + (single- and multiobjective). The default is None, matching no use of pre-defined optimizer. + pool : TFPool, optional + Pool of tokens, that can be explicitly passed. The default is None, matching no use of passed pool. + population : Population | ParetoLevels, optional + Population of candidate equations, that can be optionally passed in explicit form. The type of objects + must match the optimization algorithm: epde.optimizers.single_criterion.optimizer.Population for + single-objective mode and epde.optimizers.moeadd.moeadd.ParetoLevels for multiobjective optimization. + The default is None, specifying no passed population. + + Returns + ------- + None. """ - + # TODO: ADD EXPLICITLY SENT POPULATION PROCESSING cur_params = {'variable_names' : variable_names, 'max_deriv_order' : max_deriv_order, 'additional_tokens' : [family.token_family.ftype for family in additional_tokens]} - if self.pool == None or self.pool_params != cur_params: + if pool is None: + if self.pool is None or self.pool_params != cur_params: + if data is None: + raise ValueError('Data has to be specified beforehand or passed in fit as an argument.') self.create_pool(data = data, variable_names=variable_names, derivs=derivs, max_deriv_order=max_deriv_order, additional_tokens=additional_tokens, - data_fun_pow=data_fun_pow) + data_fun_pow = data_fun_pow, deriv_fun_pow = deriv_fun_pow, + data_nn = data_nn, fourier_layers=fourier_layers, fourier_params=fourier_params) + else: + self.pool = pool; self.pool_params = cur_params self.optimizer_init_params['population_instruct'] = {"pool": self.pool, "terms_number": equation_terms_max_number, "max_factors_in_term": equation_factors_max_number, "sparsity_interval": eq_sparsity_interval} - if self.multiobjective_mode: - self.optimizer = MOEADDOptimizer(**self.optimizer_init_params) - best_obj = np.concatenate((np.zeros(shape=len([1 for token_family in self.pool.families if token_family.status['demands_equation']])), - np.ones(shape=len([1 for token_family in self.pool.families if token_family.status['demands_equation']])))) - print('best_obj', len(best_obj)) - self.optimizer.pass_best_objectives(*best_obj) + if optimizer is None: + self.optimizer = self._create_optimizer(self.multiobjective_mode, self.optimizer_init_params, + self.director) else: - self.optimizer = SimpleOptimizer(**self.optimizer_init_params) - - self.optimizer.set_strategy(self.director) + self.optimizer = optimizer + self.optimizer.optimize(**self.optimizer_exec_params) - - print('The optimization has been conducted.') - self.search_conducted = True - - def fit_multiobjective(self, equation_terms_max_number=6, equation_factors_max_number=1, eq_sparsity_interval=(1e-4, 2.5)): - """ - Fitting functional for multiojective optimization - - Args: - equation_terms_max_number (`int`): maximum count of terms in
differential equation, default - 6 - equation_factors_max_number (`int`): maximum count of factors in term of differential equation, default - 1 - eq_sparsity_interval (`tuple`): interval of allowed values of sparsity constant for lasso regression, default - (1e-4, 2.5) - - Returns: - None - """ - self.optimizer_init_params['population_instruct'] = {"pool": self.pool, "terms_number": equation_terms_max_number, - "max_factors_in_terms": equation_factors_max_number, "sparsity_interval": eq_sparsity_interval} - - self.optimizer = MOEADDOptimizer(**self.optimizer_init_params) - self.optimizer.set_strategy(self.director) - best_obj = np.concatenate((np.zeros(shape=len([1 for token_family in self.pool.families if token_family.status['demands_equation']])), - np.ones(shape=len([1 for token_family in self.pool.families if token_family.status['demands_equation']])))) - print('best_obj', len(best_obj)) - self.optimizer.pass_best_objectives(*best_obj) - self.optimizer.optimize(**self.optimizer_exec_params) - print('The optimization has been conducted.') - self.search_conducted = True - - def fit_singleobjective(self, equation_terms_max_number=6, equation_factors_max_number=1, eq_sparsity_interval=(1e-4, 2.5)): - """ - Fitting functional for singleobjective optimization - - Args: - equation_terms_max_number (`int`): maximum count of terms in differential equation, default - 6 - equation_factors_max_number (`int`): maximum count of factors in term of differential equation, default - 1 - eq_sparsity_interval (`tuple`): interval of allowed values of sparsity constant for lasso regression, default - (1e-4, 2.5) - - Returns: - None - """ - self.optimizer_init_params['population_instruct'] = {"pool": self.pool, "terms_number": equation_terms_max_number, - "max_factors_in_terms": equation_factors_max_number, "sparsity_interval": eq_sparsity_interval} - self.optimizer = SimpleOptimizer(**self.optimizer_init_params) - self.optimizer.set_strategy(self.director) - self.optimizer.optimize(**self.optimizer_exec_params) - print('The optimization has been conducted.') - self.search_conducted = True + @staticmethod + def _create_optimizer(multiobjective_mode:bool, optimizer_init_params:dict, + opt_strategy_director:OptimizationPatternDirector): + if multiobjective_mode: + optimizer = MOEADDOptimizer(**optimizer_init_params) + + best_obj = np.concatenate((np.zeros(shape=len([1 for token_family in optimizer_init_params['population_instruct']['pool'].families + if token_family.status['demands_equation']])), + np.ones(shape=len([1 for token_family in optimizer_init_params['population_instruct']['pool'].families + if token_family.status['demands_equation']])))) + print('best_obj', len(best_obj)) + optimizer.pass_best_objectives(*best_obj) + else: + optimizer = SimpleOptimizer(**optimizer_init_params) + + optimizer.set_strategy(opt_strategy_director) + return optimizer @property def _resulting_population(self): @@ -739,15 +861,17 @@ def _resulting_population(self): return self.optimizer.pareto_levels.levels else: return self.optimizer.population.population - def equations(self, only_print : bool = True, only_str = False, num = 1): """ Method for printing or getting the results of the differential equation search - Args: - only_print (`bool`): flag about action (print ot get) for results, default - True - num (`int`): count of results for getting or print, default - 1 + Parameters + ---------- + only_print : `bool`, optional + Flag about action (print or get) for results, the default is True.
 
     @property
     def _resulting_population(self):
@@ -739,15 +861,17 @@ def _resulting_population(self):
             return self.optimizer.pareto_levels.levels
         else:
             return self.optimizer.population.population
-
     def equations(self, only_print : bool = True, only_str = False, num = 1):
         """
         Method for printing or returning the results of the differential equation search

-        Args:
-            only_print (`bool`): flag about action (print ot get) for results, default - True
-            num (`int`): count of results for getting or print, default - 1
+        Parameters
+        ----------
+        only_print : `bool`, optional
+            Flag, defining whether the results are printed (True) or returned (False), the default is True.
+        num : `int`, optional
+            Number of results to return or print, the default is 1.

     Returns:
         None, when `only_print` == True
@@ -791,11 +915,11 @@ def solver_forms(self, grids: list = None, num: int = 1):
         for level in self._resulting_population[:min(num, len(self._resulting_population))]:
             temp = []
             for sys in level: #self.resulting_population[idx]:
-                temp.append(SystemSolverInterface(sys).form(grids=grids))
+                temp.append(SystemSolverInterface(sys, device=self._device).form(grids=grids))
             forms.append(temp)
         else:
             for sys in self._resulting_population[:min(num, len(self._resulting_population))]:
-                forms.append(SystemSolverInterface(sys).form(grids=grids))
+                forms.append(SystemSolverInterface(sys, device=self._device).form(grids=grids))
         return forms

     @property
@@ -805,11 +929,62 @@ def cache(self):
         else:
             return None, global_var.tensor_cache

-    def get_equations_by_complexity(self, complexity : Union[int, list]):
+    def get_equations_by_complexity(self, complexity : Union[float, list]):
+        '''
+        Get equations with the desired complexity. Works best together with ``EpdeSearch.visualize_solutions(...)``.
+
+        Parameters
+        ----------
+        complexity : float | list of floats
+            The complexity metric of the desired equation. For systems of equations shall be passed as a list of complexities.
+
+        Returns
+        -------
+        list of ``epde.structure.main_structures.SoEq`` objects.
+        '''
        return self.optimizer.pareto_levels.get_by_complexity(complexity)
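A short sketch of how the complexity lookup above is meant to be used after a multiobjective run (the complexity value itself is an assumption chosen for illustration):

# Inspect the Pareto front first, e.g. via search.equations(only_print=True, num=2),
# then request the candidates matching a chosen complexity objective.
candidates = search.get_equations_by_complexity(3.5)
for system in candidates:
    print(system.text_form)   # text_form is assumed to be the standard SoEq string representation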
-    def predict(self, system : SoEq, boundary_conditions : BoundaryConditions, grid : list = None, data = None,
-                system_file : str = None, solver_kwargs : dict = {'use_cache' : True}, mode = 'NN'):
+    def predict(self, system : SoEq, boundary_conditions: BoundaryConditions = None, grid : list = None, data = None,
+                system_file: str = None, mode: str = 'NN', compiling_params: dict = {}, optimizer_params: dict = {},
+                cache_params: dict = {}, early_stopping_params: dict = {}, plotting_params: dict = {},
+                training_params: dict = {}, use_cache: bool = False, use_fourier: bool = False,
+                fourier_params: dict = None, net = None, use_adaptive_lambdas: bool = False):
+        '''
+        Predict the state by automatically solving the discovered equation or system. Employs the solver implementation,
+        adapted from https://github.com/ITMO-NSS-team/torch_DE_solver.
+
+        Parameters
+        ----------
+        system : SoEq
+            Object, containing the system (or a single equation as a system of one equation) to solve.
+        boundary_conditions : BoundaryConditions, optional
+            Boundary condition objects; they should match the order of the differential equations, since there are no internal checks.
+            An over- or underdefined problem can result, if the number of conditions is incorrect. The default value is None,
+            matching automatic construction of the required Dirichlet BC from data.
+        grid : list of np.ndarrays, optional
+            Grids, defining the Cartesian coordinates, on which the equations will be solved. The default is None, specifying
+            the use of the grids, stored in cache during equation learning.
+        data : np.ndarray, optional
+            Dataset, from which the boundary conditions can be automatically created. The default is None, making use of
+            the training datasets, stored in cache during equation training.
+        system_file : str, optional
+            Filename for the pickled equation/system of equations. If passed, **system** can be None. The default is None, meaning no equation.
+        mode : str, optional
+            Key, defining the used method of the automatic DE solution. Supported methods: 'NN', 'mat' and 'autodiff'. The default is 'NN'.
+
+        Raises
+        ------
+        ValueError
+            If neither **system** nor **system_file** provides an equation to solve.
+
+        Returns
+        -------
+        Model, approximating the solution of the passed system on the grid.
+
+        '''
         if system is not None:
             print('Using explicitly sent system of equations.')
@@ -824,19 +999,411 @@
         if grid is None:
             grid = global_var.grid_cache.get_all()[1]

-        adapter = SolverAdapter(var_number = len(system.vars_to_describe))
-        adapter.set_solver_params(**solver_kwargs)
+        adapter = SolverAdapter(net = net, use_cache = use_cache) # var_number = len(system.vars_to_describe),
+
+        # Setting various adapter parameters
+        adapter.set_compiling_params(**compiling_params)
+
+        adapter.set_optimizer_params(**optimizer_params)
+
+        adapter.set_cache_params(**cache_params)
+
+        adapter.set_early_stopping_params(**early_stopping_params)
+
+        adapter.set_plotting_params(**plotting_params)
+
+        adapter.set_training_params(**training_params)
+
+        adapter.change_parameter('mode', mode, param_dict_key = 'compiling_params')

         print(f'grid.shape is {grid[0].shape}')
-        print(f'Shape of the grid for solver {adapter.convert_grid(grid, mode = mode).shape}')
         solution_model = adapter.solve_epde_system(system = system, grids = grid, data = data,
-                                                   boundary_conditions = boundary_conditions, mode = mode)
-        if mode == 'mat':
-            return solution_model
-        else:
-            return solution_model(adapter.convert_grid(grid, mode = mode)).detach().numpy()
+                                                   boundary_conditions = boundary_conditions,
+                                                   mode = mode, use_cache = use_cache,
+                                                   use_fourier = use_fourier, fourier_params = fourier_params,
+                                                   use_adaptive_lambdas = use_adaptive_lambdas)
+        return solution_model

-    def visualize_solutions(self, dimensions:list = [0, 1], **visulaizer_kwargs):
+    def visualize_solutions(self, dimensions:list = [0, 1], **visulaizer_kwargs) -> None:
+        '''
+        Plot the discovered equations, using matplotlib tools. By default the method plots only the Pareto-optimal
+        equations from the population. The annotations of the candidate equations are rendered with the LaTeX toolkit.
+        '''
         if self.multiobjective_mode:
             self.optimizer.plot_pareto(dimensions=dimensions, **visulaizer_kwargs)
         else:
             raise NotImplementedError('Solution visualization is implemented only for multiobjective mode.')
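With the solver settings now grouped into per-stage dictionaries, a prediction call might look as follows (a sketch; the keys inside the parameter dictionaries are assumptions and depend on the adapted torch_DE_solver version):

best = search.get_equations_by_complexity(3.5)[0]
solution = search.predict(system=best, mode='autograd',
                          training_params={'epochs': 5000},   # key name is an assumption
                          use_fourier=True, fourier_params={'L': [4], 'M': [3]})
search.visualize_solutions()   # Pareto frontier plot in multiobjective mode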
+
+
+class ExperimentCombiner(object):
+    def __init__(self, candidates: Union[ParetoLevels, List[SoEq], List[ParetoLevels]]):
+        self.complexity_matched = self.get_complexities(candidates)
+        complexity_sets = [set() for i in self.complexity_matched[0][1]]
+        for eq, complexity in self.complexity_matched:
+            for idx, compl in enumerate(complexity):
+                complexity_sets[idx].add(compl)
+        self.ordered_complexities = [sorted(compl_set) for compl_set in complexity_sets]
+
+    @singledispatchmethod
+    def get_complexities(self, candidates) -> list:
+        raise NotImplementedError('Incorrect type of equations to parse')
+
+    @get_complexities.register
+    def _(self, candidates: list) -> list:
+        if isinstance(candidates[0], ParetoLevels):
+            # Concatenation is used here, since list.append returns None inside reduce.
+            return reduce(lambda x, y: x + y, [self.get_complexities(pareto_level) for
+                                               pareto_level in candidates], [])
+        elif isinstance(candidates[0], SoEq):
+            # Here we assume, that the number of objectives is even, having quality
+            # and complexity for each equation
+            compl_objs_num = int(candidates[0].obj_fun.size/2)
+            # print(compl_objs_num)
+            return [(candidate, candidate.obj_fun[-compl_objs_num:]) for candidate in candidates]
+        else:
+            raise ValueError(f'Incorrect type of the equation, got {type(candidates[0])}')
+
+    @get_complexities.register
+    def _(self, candidates: ParetoLevels) -> list:
+        eqs = reduce(lambda x, y: x + y, [self.get_complexities(level) for
+                                          level in candidates.levels], [])
+        return eqs
+
+    def create_best_for_complexity(self, complexity: tuple, pool: TFPool):
+        vars_to_describe = self.complexity_matched[0][0].vars_to_describe # Get dependent variables
+
+        best_eqs = []
+        for idx, elem in enumerate(complexity):
+            if elem is not None:
+                relaxed_compl = [None,]*len(complexity)
+                relaxed_compl[idx] = elem
+                candidates = [candidate for candidate, _ in self.complexity_matched
+                              if candidate.matches_complexitiy(relaxed_compl)]
+                best_candidate = sorted(candidates, key=lambda x: x.obj_fun[idx])[0]
+                # best_eqs.append(best_candidate.vals[vars_to_describe[idx]])
+            else:
+                best_candidate = sorted([candidate for candidate, _ in self.complexity_matched],
+                                        key=lambda x: x.obj_fun[idx])[0]
+            best_eqs.append(best_candidate.vals[vars_to_describe[idx]])
+        compound_equation = deepcopy(self.complexity_matched[0][0])
+        compound_equation.create(passed_equations = best_eqs)
+        return compound_equation
+
+    def create_best(self, pool: TFPool):
+        best_qualities_compl = [complexities[-1] for complexities in self.ordered_complexities]
+        return self.create_best_for_complexity(best_qualities_compl, pool)
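ExperimentCombiner gathers (equation, complexity) pairs from one or several finished searches and composes a system from the best equation found for each variable. A usage sketch, assuming two completed multiobjective runs search_a and search_b:

from epde.interface.interface import ExperimentCombiner   # module path taken from this diff

combiner = ExperimentCombiner([search_a.optimizer.pareto_levels,
                               search_b.optimizer.pareto_levels])
best_system = combiner.create_best(pool=search_a.pool)
print(best_system.text_form)   # text_form assumed, as above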
+
+class EpdeMultisample(EpdeSearch):
+    def __init__(self, data_samples : List[List], multiobjective_mode: bool = True,
+                 use_default_strategy: bool = True, director=None,
+                 director_params: dict = {'variation_params': {}, 'mutation_params': {},
+                                          'pareto_combiner_params': {}, 'pareto_updater_params': {}},
+                 time_axis: int = 0, function_form=None, boundary: int = 0,
+                 use_solver: bool = False, dimensionality: int = 1, verbose_params: dict = {'show_iter_idx' : True},
+                 memory_for_cache=5, prune_domain: bool = False,
+                 pivotal_tensor_label=None, pruner=None, threshold: float = 1e-2,
+                 division_fractions=3, rectangular: bool = True, params_filename: str = None):
+        """
+        Args:
+            use_default_strategy (`bool`): optional
+                True (base and recommended value), if the default evolutionary strategy will be used,
+                False if a user-defined strategy will be passed further. Otherwise, the search will
+                not be conducted.
+            time_axis (`int`): optional
+                Indicator of the time axis in the data and grids. Used in normalization for regressions.
+            function_form (`callable`): optional
+                Auxiliary function, used in the weak derivative definition. The default function is a negative square function
+                with maximum values in the center of the domain.
+            boundary (`int|tuple/list of integers`): optional
+                Boundary width for the domain. Boundary points will be ignored for the purposes of equation discovery.
+            use_solver (`bool`): optional
+                Allow the use of the automatic partial differential equation solver to evaluate the fitness of the candidate solutions.
+            dimensionality (`int`): optional
+                Dimensionality of the problem. ! Currently you should pass the value, reduced by one !
+            verbose_params (`dict`): optional
+                Description of the algorithm details, that will be demonstrated to the user.
+            memory_for_cache (`int|float`): optional
+                Rough estimation of the memory, which can be used for the cache of pre-evaluated tensors during the equation search.
+            prune_domain (`bool`): optional
+                If ``True``, subdomains with no dynamics will be pruned from the data. Default value: ``False``.
+            pivotal_tensor_label (`str`): optional
+                Indicator, according to which the token data will be pruned. Default value - ``'du/dt'``, where
+                ``t`` is selected as the time axis from the ``time_axis`` parameter.
+            pruner (`object`): optional
+                Pruner object, which will remove subdomains with no dynamics, i.e. with the derivative
+                identically equal to zero.
+            threshold (`float`): optional
+                Pruner parameter, indicating the boundary of the interval, in which the pivotal tensor values are
+                considered as zeros. Default value: 1e-2.
+            division_fractions (`int`): optional
+                Number of subdomains along each axis, defining the division of the domain for pruning.
+                Default value: 3.
+            rectangular (`bool`): optional
+                A line of subdomains along an axis can be removed only if all values inside them are identical to zero.
+ """ + super().__init__(multiobjective_mode = multiobjective_mode, use_default_strategy = use_default_strategy, + director = director, director_params = director_params, time_axis = time_axis, + define_domain = False, function_form = function_form, boundary = boundary, + use_solver = use_solver, dimensionality = dimensionality, verbose_params = verbose_params, + coordinate_tensors = None, memory_for_cache = memory_for_cache, prune_domain = prune_domain, + pivotal_tensor_label = pivotal_tensor_label, pruner = pruner, threshold = threshold, + division_fractions = division_fractions, rectangular = rectangular, + params_filename = params_filename) + self._memory_for_cache = memory_for_cache + self._boundary = boundary + self._function_form = function_form + + grids = [sample[0] for sample in data_samples] + print('grids shape is', [(type(subgrid), len(subgrid)) for subgrid in grids]) + + subgrids = [list() for var_grid in grids[0]] + for sample_grids in grids: + for idx, var_grid in enumerate(sample_grids): + subgrids[idx].append(var_grid) + + grids_stacked = [np.concatenate(var_grid) for var_grid in subgrids] + self.set_domain_properties(grids_stacked, self._memory_for_cache, self._boundary, self._function_form) + + global_var.grid_cache.g_func = np.concatenate([self.g_func(grid) for grid in grids]) + + # Domain will not be set properly in init, thus a separate initialization is necessary + + def set_domain_properties(self, coordinate_tensors, memory_for_cache, boundary_width: Union[int, list], + function_form: Callable = None, prune_domain: bool = False, + pivotal_tensor_label=None, pruner=None, threshold: float = 1e-5, + division_fractions: int = 3, rectangular: bool = True): + """ + Setting properties for processing considered domain, such as removing areas with no dynamics, + and setting bounderes. Can be used for uploading test function. In enseble equation learning can not + take coordinates as the argument. + + Parameters + ---------- + memory_for_cache : int + Allowed amount of memory (in percentage) for data storage. + boundary_width : int|list + The number of unaccountable elements at the edges of the domain. + function_form : callable, optional + Testing function connected to the weak derivative notion, the default value is None, that + corresponds with the product of normalized inverse square functions of the coordinates, + centered at the middle of the domain. + prune_domain : bool + Flag, enabling area cropping by removing subdomains with constant values, default - False. + pivotal_tensor_label : np.ndarray + Pattern that guides the domain pruning, the default is None. + pruner : DomainPruner + Object for selecting domain region, the default is None. + threshold : float, optional + The boundary at which values are considered zero, the default is 1e-5. + division_fractions : int, optional + Number of fraction for each axis (if this is integer than all axis are dividing by + same fractions), the default is 3. + rectangular : bool, optional + Flag indecating that crop subdomains are rectangle, default - True. + + Returns + ------- + None. + + """ + # raise NotImplementedError('In ensemble mode the domain is set in `set_samples` method.') + + # assert self.coodinate_tensors is not None, 'Coordinate tensors for the sample have to be set beforehand.' 
+    def set_domain_properties(self, coordinate_tensors, memory_for_cache, boundary_width: Union[int, list],
+                              function_form: Callable = None, prune_domain: bool = False,
+                              pivotal_tensor_label=None, pruner=None, threshold: float = 1e-5,
+                              division_fractions: int = 3, rectangular: bool = True):
+        """
+        Setting the properties for processing the considered domain, such as removing areas with no dynamics,
+        and setting the boundaries. Can be used for uploading the test function. In ensemble equation learning it cannot
+        take coordinates as an argument.
+
+        Parameters
+        ----------
+        memory_for_cache : int
+            Allowed amount of memory (in percentage) for data storage.
+        boundary_width : int|list
+            The number of unaccountable elements at the edges of the domain.
+        function_form : callable, optional
+            Testing function connected to the weak derivative notion, the default value is None, that
+            corresponds with the product of normalized inverse square functions of the coordinates,
+            centered at the middle of the domain.
+        prune_domain : bool
+            Flag, enabling area cropping by removing subdomains with constant values, default - False.
+        pivotal_tensor_label : np.ndarray
+            Pattern that guides the domain pruning, the default is None.
+        pruner : DomainPruner
+            Object for selecting the domain region, the default is None.
+        threshold : float, optional
+            The boundary at which values are considered zero, the default is 1e-5.
+        division_fractions : int, optional
+            Number of fractions for each axis (if an integer is passed, all axes are divided into the
+            same number of fractions), the default is 3.
+        rectangular : bool, optional
+            Flag, indicating that the cropped subdomains are rectangular, default - True.
+
+        Returns
+        -------
+        None.
+
+        """
+        # raise NotImplementedError('In ensemble mode the domain is set in `set_samples` method.')
+
+        # assert self.coodinate_tensors is not None, 'Coordinate tensors for the sample have to be set beforehand.'
+        self._create_caches(coordinate_tensors=coordinate_tensors, memory_for_cache=memory_for_cache)
+        if prune_domain:
+            self.domain_pruning(pivotal_tensor_label, pruner, threshold, division_fractions, rectangular)
+        self.set_boundaries(boundary_width)
+
+        # TODO
+        self._upload_g_func(function_form)
+
+    def _upload_g_func(self, function_form: Union[Callable, np.ndarray, list] = None, boundary_width: int = None):
+        """
+        Loading the testing function connected to the weak derivative notion. In contrast to the single-equation
+        discovery approach, the testing function is not immediately stored in cache, but saved to be
+        applied to equations later.
+
+        Args:
+            function_form (`callable`, or `np.ndarray`, or `list[np.ndarray]`)
+                Test function, the default is an inverse polynomial with the maximum in the domain center.
+
+        Returns:
+            None
+        """
+        boundary_width = boundary_width if boundary_width is not None else global_var.grid_cache.boundary_width
+        if isinstance(function_form, (np.ndarray, list)):
+            self.g_func = function_form
+        else:
+            try:
+                decorator = BoundaryExclusion(boundary_width=boundary_width)
+                if function_form is None:
+                    def baseline_exp_function(grids):
+                        def uniformize(data):
+                            temp = -(data - np.mean(data))**2
+                            if np.min(temp) == np.max(temp):
+                                return np.ones_like(temp)
+                            else:
+                                return (temp - np.min(temp)) / (np.max(temp) - np.min(temp))
+
+                        exponent_partial = np.array([uniformize(grid) for grid in grids])
+                        exponent = np.multiply.reduce(exponent_partial, axis=0)
+                        return exponent
+
+                    self.g_func = decorator(baseline_exp_function)
+                else:
+                    self.g_func = decorator(function_form)
+
+            except NameError:
+                raise NameError('Cache for grids has not been initialized yet!')
+
+
+    def set_samples(self, data_samples: List[List], sample_derivs: List[List[np.ndarray]] = None, var_names: List[str] = ['u',],
+                    max_deriv_orders: Union[int, list[int]] = 1, additional_tokens: list = [], data_fun_pow: int = 1,
+                    deriv_fun_pow: int = 1):
+        if isinstance(data_samples[0][1], np.ndarray):
+            data_comb = [sample[1] for sample in data_samples]
+            print('Samples are np.ndarrays somehow')
+        elif isinstance(data_samples[0][1], (tuple, list)):
+            data_comb = []
+            assert all([isinstance(sample_var, np.ndarray) for sample_var in data_samples[0][1]]), f'Samples must be passed as \
+                a list of multiple numpy ndarrays, if the equations are derived for multiple dependent variables.'
+            print(f'Presumably we have {len(data_samples[0][1])} dependent variables')
+            for var_idx in range(len(data_samples[0][1])):
+                data_comb.append([sample[1][var_idx] for sample in data_samples])
+
+        grids = [sample[0] for sample in data_samples]
+
+        # subgrids = [list() for var_grid in grids[0]]
+        # for sample_grids in grids:
+        #     for idx, var_grid in sample_grids:
+        #         subgrids[idx].append(var_grid)
+
+        # grids_stacked = [np.concatenate(var_grid) for var_grid in subgrids]
+
+        self.create_pool(data = data_comb, variable_names = var_names, derivs = sample_derivs,
+                         max_deriv_order = max_deriv_orders, additional_tokens = additional_tokens,
+                         data_fun_pow = data_fun_pow, deriv_fun_pow=deriv_fun_pow, grid = grids) # Implement sample-wise differentiation.
+
+        # for sample in data_samples[1:]:
+        #     if multi_var_mode:
+        #         pass
+        # TODO: calculate derivatives, combine them into single arrays to correctly create tokens.
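The weak-form test function handled by _upload_g_func above can also be user-supplied through the function_form argument; a minimal sketch of a custom form (a Gaussian bump; the particular choice is illustrative, not prescribed by the patch):

import numpy as np

def gaussian_g_func(grids):
    # Product of Gaussians, centred in the domain; grids is a list of coordinate arrays.
    res = np.ones_like(grids[0])
    for grid in grids:
        res *= np.exp(-((grid - grid.mean()) / (grid.std() + 1e-12)) ** 2)
    return res

search = EpdeMultisample(data_samples=samples, function_form=gaussian_g_func)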
+    # def set_derivatives(self, variable:str, deriv:np.ndarray):
+    #     '''
+    #     Pass the derivatives of a variable as a np.ndarray.
+
+    #     Parameters
+    #     ----------
+    #     variable : str
+    #         Key for the variable to have the derivatives set.
+    #     deriv : np.ndarray
+    #         Arrays of derivatives. Have to be shaped as (n, m), where n is the number of passed derivatives
+    #         (for example, when you differentiate the dataset once for the first axis, and up to the second order for
+    #         the second, and you have no mixed derivatives, *n = 3*), and m is the number of data points in the domain.
+
+    #     Returns
+    #     -------
+    #     None.
+    #     '''
+    #     try:
+    #         self._derivatives
+    #     except AttributeError:
+    #         self._derivatives = {}
+    #     self._derivatives[variable] = deriv
+
+
+    def fit(self, samples: List[Tuple], equation_terms_max_number=6, equation_factors_max_number=1, variable_names=['u',],
+            eq_sparsity_interval=(1e-4, 2.5), derivs=None, max_deriv_order=1, additional_tokens=[],
+            data_fun_pow: int = 1, deriv_fun_pow: int = 1, optimizer: Union[SimpleOptimizer, MOEADDOptimizer] = None,
+            pool: TFPool = None, population: Union[ParetoLevels, Population] = None):
+        """
+        Fit the epde search algorithm to obtain differential equations, describing the passed data.
+
+        Parameters
+        ----------
+        samples : list of tuples, optional
+            Pairs of (grids, values) of the modeled variables, one pair per sample. If the variable is single
+            (i.e. deriving a single equation), its values are passed as a numpy.ndarray; for multiple variables,
+            each sample holds a list/tuple of arrays. The default is None, but it shall be used only for
+            retraining, when the pool argument is passed.
+        equation_terms_max_number : int, optional
+            The maximum number of terms, present in the derived equations, the default is 6.
+        equation_factors_max_number : int, optional
+            The maximum number of factors (token functions; real-valued coefficients are not counted here),
+            present in the terms of the equation, the default is 1.
+        variable_names : list | str, optional
+            Names of the independent variables, passed into the search mechanism. The length of the list must correspond
+            to the number of np.ndarrays, sent in the ``samples`` parameter. In case of the discovery of a system of
+            differential equations, all variables shall be named here, default - ``['u',]``, representing a single variable *u*.
+        eq_sparsity_interval : tuple, optional
+            The left and right boundaries of the interval of the sparse regression values. Indirectly influences the
+            number of active terms in the equation, the default is ``(1e-4, 2.5)``.
+        derivs : list or list of lists of np.ndarrays, optional
+            Pre-computed values of the derivatives. If ``None`` is passed, the derivatives are calculated in the
+            method. Recommended to use, if the computation of the derivatives takes too long. For further information
+            about using data, prepared in advance, check the ``epde.preprocessing.derivatives.preprocess_derivatives``
+            function, default - None.
+        max_deriv_order : int | list | tuple, optional
+            Highest order of the calculated derivatives, the default is 1.
+        additional_tokens : list of TokenFamily or Prepared_tokens, optional
+            Additional tokens, that would be used to construct the equations among the main variables and their
+            derivatives. Objects of this list must be of type ``epde.interface.token_family.TokenFamily`` or
+            of the ``epde.interface.prepared_tokens.Prepared_tokens`` subclasses types. The default is None.
+        data_fun_pow : int, optional
+            Maximum power of the token, the default is 1.
+        optimizer : SimpleOptimizer | MOEADDOptimizer, optional
+            Pre-defined optimizer, that will be used during the evolution. Must match the optimization mode
+            (single- or multiobjective). The default is None, matching no use of a pre-defined optimizer.
+        pool : TFPool, optional
+            Pool of tokens, that can be explicitly passed. The default is None, matching no use of a passed pool.
+        population : Population | ParetoLevels, optional
+            Population of candidate equations, that can be optionally passed in explicit form. The type of objects
+            must match the optimization algorithm: epde.optimizers.single_criterion.optimizer.Population for
+            single-objective mode and epde.optimizers.moeadd.moeadd.ParetoLevels for multiobjective optimization.
+            The default is None, specifying no passed population.
+
+        Returns
+        -------
+        None.
+        """
+        # TODO: ADD EXPLICITLY SENT POPULATION PROCESSING
+        cur_params = {'variable_names' : variable_names, 'max_deriv_order' : max_deriv_order,
+                      'additional_tokens' : [family.token_family.ftype for family in additional_tokens]}
+
+        if pool is None:
+            self.set_samples(samples, sample_derivs=derivs, var_names = variable_names, max_deriv_orders = max_deriv_order,
+                             additional_tokens = additional_tokens, data_fun_pow = data_fun_pow, deriv_fun_pow=deriv_fun_pow)
+        else:
+            self.pool = pool; self.pool_params = cur_params
+
+        self.optimizer_init_params['population_instruct'] = {"pool": self.pool, "terms_number": equation_terms_max_number,
+                                                             "max_factors_in_term": equation_factors_max_number,
+                                                             "sparsity_interval": eq_sparsity_interval}
+
+        if optimizer is None:
+            self.optimizer = self._create_optimizer(self.multiobjective_mode, self.optimizer_init_params,
+                                                    self.director)
+        else:
+            self.optimizer = optimizer
+
+        self.optimizer.optimize(**self.optimizer_exec_params)
+
+        print('The optimization has been conducted.')
+        self.search_conducted = True
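Continuing the construction sketch above, a multi-sample fit call could look as follows (parameter values are illustrative):

search.fit(samples=samples, variable_names=['u'], max_deriv_order=(2,),
           equation_terms_max_number=4, eq_sparsity_interval=(1e-4, 1e-1))
search.equations(only_print=True, num=2)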
diff --git a/epde/interface/logger.py b/epde/interface/logger.py
index 3c30a67..6315bd4 100644
--- a/epde/interface/logger.py
+++ b/epde/interface/logger.py
@@ -195,6 +195,4 @@ def to_pandas(self, terms: Union[list, tuple], metrics: list = [np.mean, np.var,
             metric_frames.append(pd.DataFrame(data, index = row_labels))

-        return metric_frames
-
-
\ No newline at end of file
+        return metric_frames
\ No newline at end of file
diff --git a/epde/interface/prepared_tokens.py b/epde/interface/prepared_tokens.py
index 9e1a249..a088806 100644
--- a/epde/interface/prepared_tokens.py
+++ b/epde/interface/prepared_tokens.py
@@ -5,22 +5,23 @@
 @author: mike_ubuntu
 """
-import numpy as np
 from abc import ABC
 from collections import OrderedDict
-from typing import Union, Callable
-import time
+from typing import Union, Callable, List, Tuple
+
+import numpy as np
+import torch

 from
epde.supplementary import define_derivatives
from epde.preprocessing.derivatives import preprocess_derivatives
import epde.globals as global_var
from epde.interface.token_family import TokenFamily
-from epde.evaluators import CustomEvaluator, EvaluatorTemplate, trigonometric_evaluator, simple_function_evaluator
-from epde.evaluators import const_evaluator, const_grad_evaluator, grid_evaluator
-from epde.evaluators import velocity_evaluator, velocity_grad_evaluators, phased_sine_evaluator
 from epde.cache.cache import upload_simple_tokens, prepare_var_tensor # np_ndarray_section,
+from epde.evaluators import CustomEvaluator, EvaluatorTemplate, trigonometric_evaluator, \
+    simple_function_evaluator, const_evaluator, const_grad_evaluator, grid_evaluator, \
+    velocity_evaluator, velocity_grad_evaluators, phased_sine_evaluator, sign_evaluator

 class PreparedTokens(ABC):
     """
@@ -38,6 +39,47 @@ def token_family(self):
             raise AttributeError(f'Some attributes of the token family have not been declared.')
         return self._token_family

+class ArbitraryDataFunction(PreparedTokens):
+    def __init__(self, token_type: str, var_name: str, token_labels: list,
+                 evaluator: Union[CustomEvaluator, EvaluatorTemplate, Callable],
+                 params_ranges: dict, params_equality_ranges: dict = None, unique_specific_token=True,
+                 unique_token_type=True, meaningful=True, non_default_power = False,
+                 deriv_solver_orders: list = [[None,],]): # Add more intuitive method of declaring solver orders
+        """
+        Class for tokens, representing arbitrary functions of the modelled variable passed in `var_name` or its derivatives.
+        """
+        self._token_family = TokenFamily(token_type = token_type, variable = var_name,
+                                         family_of_derivs=True)
+
+        self._token_family.set_status(demands_equation=False, meaningful=meaningful,
+                                      unique_specific_token=unique_specific_token, unique_token_type=unique_token_type,
+                                      s_and_d_merged=False, non_default_power = non_default_power)
+
+        self._token_family.set_params(token_labels, params_ranges, params_equality_ranges,
+                                      derivs_solver_orders=deriv_solver_orders)
+        self._token_family.set_evaluator(evaluator)
+
+class DerivSignFunction(PreparedTokens):
+    def __init__(self, token_type: str, var_name: str, token_labels: list, unique_specific_token=True,
+                 # evaluator: Union[CustomEvaluator, EvaluatorTemplate, Callable],
+                 # params_ranges: dict, params_equality_ranges: dict = None,
+                 unique_token_type=True, meaningful=True, non_default_power = False,
+                 deriv_solver_orders: list = [[None,],]): # Add more intuitive method of declaring solver orders
+        """
+        Class for tokens, applying the sign function to the modelled variable passed in `var_name` or to its derivatives.
+ """ + self._token_family = TokenFamily(token_type = token_type, variable = var_name, + family_of_derivs=True) + + self._token_family.set_status(demands_equation=False, meaningful=meaningful, + unique_specific_token=unique_specific_token, unique_token_type=unique_token_type, + s_and_d_merged=False, non_default_power = non_default_power) + + params_ranges = OrderedDict([('power', (1, 1))]) + params_equality_ranges = {'power': 0} + self._token_family.set_params(token_labels, params_ranges, params_equality_ranges, + derivs_solver_orders=deriv_solver_orders) + self._token_family.set_evaluator(sign_evaluator) class DataPolynomials(PreparedTokens): def __init__(self, var_name: str, max_power: int = 1): @@ -79,10 +121,102 @@ def latex_form(label, **params): s_and_d_merged=False, non_default_power = True) self._token_family.set_params([var_name,], OrderedDict([('power', (1, max_power))]), {'power': 0}, [[None,],]) - self._token_family.set_evaluator(simple_function_evaluator, []) + self._token_family.set_evaluator(simple_function_evaluator) + +class DataSign(PreparedTokens): + def __init__(self, var_name: str, max_power: int = 1): + """ + Class for tokens, representing power products of the modelled variable. + Argument `max_power` represents the maximum power, in which the tokens will exponentiated. + Should be included into the pool by default, replacing the default 1-st power of the data. + """ + raise NotImplementedError('TBD.') + self._token_family = TokenFamily(token_type=f'poly of {var_name}', variable = var_name, + family_of_derivs=True) + + def latex_form(label, **params): + ''' + Parameters + ---------- + label : str + label of the token, for which we construct the latex form. + **params : dict + dictionary with parameter labels as keys and tuple of parameter values + and their output text forms as values. + + Returns + ------- + form : str + LaTeX-styled text form of token. + ''' + if '/' in label: + label = label[:label.find('x')+1] + '_' + label[label.find('x')+1:] + label = label.replace('d', r'\partial ').replace('/', r'}{') + label = r'\frac{' + label + r'}' + + if params['power'][0] > 1: + label = r'\left(' + label + r'\right)^{{{0}}}'.format(params["power"][1]) + return label + + self._token_family.set_latex_form_constructor(latex_form) + self._token_family.set_status(demands_equation=False, meaningful=True, + unique_specific_token=True, unique_token_type=True, + s_and_d_merged=False, non_default_power = True) + self._token_family.set_params([var_name,], OrderedDict([('power', (1, max_power))]), + {'power': 0}, [[None,],]) + self._token_family.set_evaluator(simple_function_evaluator) + +class ControlVarTokens(PreparedTokens): + def __init__(self, sample: Union[np.ndarray, List[np.ndarray]], ann: torch.nn.Sequential = None, + var_name: Union[str, List[str]] = 'ctrl', arg_var: List[Tuple[Union[int, List]]] = [(0, [None,]),], + eval_torch: Union[Callable, dict] = None, eval_np: Union[Callable, dict] = None, device:str = 'cpu'): + vars, der_ords = zip(*arg_var) + if isinstance(sample, List): + assert isinstance(var_name, List), 'Both samples and var names have to be set as Lists or single elements.' 
+ num_ctrl_comp = len(var_name) + else: + num_ctrl_comp = 1 + + token_params = OrderedDict([('power', (1, 1)),]) + equal_params = {'power': 0} + + self._token_family = TokenFamily(token_type = 'ctrl', variable = vars, + family_of_derivs=True) + + self._token_family.set_status(demands_equation=False, meaningful=True, + unique_specific_token=True, unique_token_type=True, + s_and_d_merged=False, non_default_power = False) + if isinstance(var_name, str): var_name = [var_name,] + self._token_family.set_params(var_name, token_params, equal_params, + derivs_solver_orders=[der_ords for label in var_name]) + def nn_eval_torch(*args, **kwargs): + if isinstance(args[0], torch.Tensor): + inp = torch.cat([torch.reshape(tensor, (-1, 1)) for tensor in args], dim = 1).to(device) + else: + inp = torch.cat([torch.reshape(torch.Tensor([elem,]), (-1, 1)) for elem in args], dim = 1).to(device) + return global_var.control_nn.net(inp).to(device) + + def nn_eval_np(*args, **kwargs): + return nn_eval_torch(*args, **kwargs).detach().cpu().numpy() + + if eval_np is None: eval_np = nn_eval_np + if eval_torch is None: eval_torch = nn_eval_torch + + eval = CustomEvaluator(evaluation_functions_np = eval_np, + evaluation_functions_torch = eval_torch, + eval_fun_params_labels = ['power']) + + global_var.reset_control_nn(n_control = num_ctrl_comp, ann = ann, + ctrl_args = arg_var, device = device) + if isinstance(sample, np.ndarray): + global_var.tensor_cache.add(tensor = sample, label = (var_name[0], (1.0,))) + else: + for idx, var_elem in enumerate(var_name): + global_var.tensor_cache.add(tensor = sample[idx], label = (var_elem, (1.0,))) + self._token_family.set_evaluator(eval) class TrigonometricTokens(PreparedTokens): """ @@ -132,7 +266,7 @@ def latex_form(label, **params): trig_equal_params = {'power': 0, 'freq': (freq[1] - freq[0]) / freq_equality_fraction, 'dim': 0} self._token_family.set_params(['sin', 'cos'], trig_token_params, trig_equal_params) - self._token_family.set_evaluator(trigonometric_evaluator, []) + self._token_family.set_evaluator(trigonometric_evaluator) class PhasedSine1DTokens(PreparedTokens): @@ -170,7 +304,7 @@ def latex_form(label, **params): sine_equal_params = {'power': 0, 'freq': (freq[1] - freq[0]) / freq_equality_fraction, 'phase': 0.05} self._token_family.set_params(['sine',], sine_token_params, sine_equal_params) - self._token_family.set_evaluator(phased_sine_evaluator, []) + self._token_family.set_evaluator(phased_sine_evaluator) class GridTokens(PreparedTokens): @@ -217,7 +351,7 @@ def latex_form(label, **params): grid_equal_params = {'power': 0, 'dim': 0} self._token_family.set_params(labels, grid_token_params, grid_equal_params) - self._token_family.set_evaluator(grid_evaluator, []) + self._token_family.set_evaluator(grid_evaluator) class LogfunTokens(PreparedTokens): @@ -253,7 +387,9 @@ def __init__(self, token_type: str, token_labels: list, """ self._token_family = TokenFamily(token_type=token_type) self._token_family.set_status(unique_specific_token=unique_specific_token, - unique_token_type=unique_token_type, meaningful=meaningful) + unique_token_type=unique_token_type, + meaningful=meaningful, + non_default_power = non_default_power) default_param_eq_fraction = 0.5 if params_equality_ranges is not None: for param_key, interval in params_ranges.items(): @@ -271,20 +407,21 @@ def __init__(self, token_type: str, token_labels: list, params_equality_ranges[param_key] = 0 self._token_family.set_params(token_labels, params_ranges, params_equality_ranges) - 
self._token_family.set_evaluator(evaluator, []) + self._token_family.set_evaluator(evaluator) class CacheStoredTokens(CustomTokens): def __init__(self, token_type: str, token_labels: list, token_tensors: dict, params_ranges: dict, params_equality_ranges: Union[None, dict], dimensionality: int = 1, - unique_specific_token=True, unique_token_type=True, meaningful=False): + unique_specific_token=True, unique_token_type=True, meaningful=False, + non_default_power = True): if set(token_labels) != set(list(token_tensors.keys())): raise KeyError('The labels of tokens do not match the labels of passed tensors') upload_simple_tokens(list(token_tensors.keys()), global_var.tensor_cache, list(token_tensors.values())) super().__init__(token_type=token_type, token_labels=token_labels, evaluator=simple_function_evaluator, params_ranges=params_ranges, params_equality_ranges=params_equality_ranges, dimensionality=dimensionality, unique_specific_token=unique_specific_token, - unique_token_type=unique_token_type, meaningful=meaningful) + unique_token_type=unique_token_type, meaningful=meaningful, non_default_power = non_default_power) class ExternalDerivativesTokens(CustomTokens): @@ -339,9 +476,9 @@ def __init__(self, values_range=(-np.inf, np.inf)): print('Conducting init procedure for ConstantToken:') self._token_family.set_params(['const'], const_token_params, const_equal_params) print('Parameters set') - self._token_family.set_evaluator(const_evaluator, []) + self._token_family.set_evaluator(const_evaluator) print('Evaluator set') - self._token_family.set_deriv_evaluator({'value': const_grad_evaluator}, []) + self._token_family.set_deriv_evaluator({'value': const_grad_evaluator}) class VelocityHEQTokens(PreparedTokens): @@ -363,6 +500,6 @@ def __init__(self, param_ranges): equal_params = {'power': 0} equal_params.update(opt_params_equality) self._token_family.set_params(['v'], token_params, equal_params) - self._token_family.set_evaluator(velocity_evaluator, []) + self._token_family.set_evaluator(velocity_evaluator) grad_eval_labeled = {'p'+str(idx+1): fun for idx, fun in enumerate(velocity_grad_evaluators)} self._token_family.set_deriv_evaluator(grad_eval_labeled, []) diff --git a/epde/interface/solver_integration.py b/epde/interface/solver_integration.py deleted file mode 100644 index eed6d24..0000000 --- a/epde/interface/solver_integration.py +++ /dev/null @@ -1,529 +0,0 @@ -''' - -''' - -# !/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import numpy as np -import torch - -from typing import Callable, Union -from types import FunctionType -from epde.solver.models import FourierNN -from epde.solver.solver import grid_format_prepare - -VAL_TYPES = Union[FunctionType, int, float, torch.Tensor, np.ndarray] -BASE_SOLVER_PARAMS = {'lambda_bound' : 100, 'verbose' : True, - 'gamma' : 0.9, 'lr_decay' : 400, 'derivative_points' : 3, - 'learning_rate' : 1e-3, 'eps' : 1e-6, 'tmin' : 5000, - 'tmax' : 2*1e4, 'use_cache' : False, 'cache_verbose' : True, - 'patience' : 10, 'loss_oscillation_window' : 100, - 'no_improvement_patience' : 100, 'save_always' : False, - 'print_every' : 1000, 'optimizer_mode' : 'Adam', - 'model_randomize_parameter' : 1e-5, 'step_plot_print' : False, - 'step_plot_save' : True, 'image_save_dir' : '/home/maslyaev/epde/EPDE_main/ann_imgs/', 'tol' : 0.01 } - -from functools import singledispatchmethod, singledispatch - -from epde.structure.main_structures import Equation, SoEq -import epde.globals as global_var - -from epde.solver.input_preprocessing import Equation as SolverEquation -import 
epde.solver.solver as solver -from epde.solver.models import mat_model - -class PregenBOperator(object): - def __init__(self, system: SoEq, system_of_equation_solver_form: list): - self.system = system - self.equation_sf = [eq for eq in system_of_equation_solver_form] - self.variables = list(system.vars_to_describe) - # print('Varibales:', self.variables) - # self.max_ord = self.max_deriv_orders - - def demonstrate_required_ords(self): - linked_ords = list(zip([eq.main_var_to_explain for eq in self.system], - self.max_deriv_orders)) - print( - f'Orders, required by an equation, are as follows: {linked_ords}') - - @property - def conditions(self): - return self._bconds - - @conditions.setter - def conditions(self, conds: list): - self._bconds = [] - if len(conds) != int(sum([value.sum() for value in self.max_deriv_orders.values()])): - raise ValueError( - 'Number of passed boundry conditions does not match requirements of the system.') - for condition in conds: - if isinstance(condition, BOPElement): - self._bconds.append(condition()) - else: - print('condition is ', type(condition), condition) - raise NotImplementedError( - 'In-place initialization of boundary operator has not been implemented yet.') - - @property - def max_deriv_orders(self): - return self.get_max_deriv_orders(self.equation_sf, self.variables) - - @staticmethod - def get_max_deriv_orders(system_sf: list, variables: list = ['u',]) -> np.ndarray: - def count_factor_order(factor_code, deriv_ax): - if factor_code is None: - return 0 - else: - if isinstance(factor_code, list): - return factor_code.count(deriv_ax) - elif isinstance(factor_code, int): - return 1 if factor_code == deriv_ax else 0 - else: - raise TypeError('Incorrect type of the input.') - - @singledispatch - def get_equation_requirements(equation_sf, variables=['u',]): - raise NotImplementedError( - 'Single-dispatch called in generalized form') - - @get_equation_requirements.register - def _(equation_sf: dict, variables=['u',]) -> dict: # dict = {u: 0}): - dim = global_var.grid_cache.get('0').ndim - if len(variables) == 1: - print('processing a single variable') - var_max_orders = np.zeros(dim) - for term in equation_sf.values(): - if isinstance(term['pow'], list): - for deriv_factor in term['term']: - orders = np.array([count_factor_order(deriv_factor, ax) for ax - in np.arange(dim)]) - var_max_orders = np.maximum(var_max_orders, orders) - else: - orders = np.array([count_factor_order(term['term'], ax) for ax - in np.arange(dim)]) - var_max_orders = {variables[0]: np.maximum(var_max_orders, orders)} - return var_max_orders - else: - var_max_orders = {var_key: np.zeros(dim) for var_key in variables} - for term_key, symb_form in equation_sf.items(): - if isinstance(symb_form['var'], list): - assert len(symb_form['term']) == len(symb_form['var']) - for factor_idx, deriv_factor in enumerate(symb_form['term']): - var_orders = np.array([count_factor_order(deriv_factor, ax) for ax - in np.arange(dim)]) - var_key = symb_form['var'][factor_idx] - 1 - var_max_orders[variables[var_key]] = np.maximum(var_max_orders[variables[var_key]], - var_orders) - elif isinstance(symb_form['var'], int): - raise NotImplementedError() - assert len(symb_form['term']) == 1 - for factor_idx, factor in enumerate([count_factor_order(symb_form['term'], ax) for ax - in np.arange(dim)]): - var_orders = np.array([count_factor_order(deriv_factor, ax) for ax - in np.arange(dim)]) - var_key = symb_form['var'][factor_idx] - var_max_orders[var_key] = np.maximum(var_max_orders[var_key], var_orders) - 
return var_max_orders - - @get_equation_requirements.register - def _(equation_sf: list, variables=['u',]): - raise NotImplementedError( - 'TODO: add equation list form processing') # TODO - - eq_forms = [] - for equation_form in system_sf: - eq_forms.append(get_equation_requirements(equation_form, variables)) - - max_orders = {var: np.maximum.accumulate([eq_list[var] for eq_list in eq_forms])[-1] - for var in variables} # TODO - return max_orders - - def generate_default_bc(self, vals: Union[np.ndarray, dict] = None, grids: list = None, - allow_high_ords: bool = False): - # Implement allow_high_ords - selection of derivatives from - required_bc_ord = self.max_deriv_orders - assert set(self.variables) == set(required_bc_ord.keys()), 'Some conditions miss required orders.' - # assert (len(self.variables) == 1) == isinstance(vals, dict), f'{isinstance(vals, dict), vals} ' - - grid_cache = global_var.initial_data_cache - tensor_cache = global_var.initial_data_cache - - if vals is None: - val_keys = {key: (key, (1.0,)) for key in self.variables} - - if grids is None: - _, grids = grid_cache.get_all() - - relative_bc_location = {0: (), 1: (0,), 2: (0, 1), - 3: (0., 0.5, 1.), 4: (0., 1/3., 2/3., 1.)} - - bconds = [] - tensor_shape = grids[0].shape - - def get_boundary_ind(tensor_shape, axis, rel_loc): - return tuple(np.meshgrid(*[np.arange(shape) if dim_idx != axis else min(int(rel_loc * shape), shape-1) - for dim_idx, shape in enumerate(tensor_shape)], indexing='ij')) - - for var_idx, variable in enumerate(self.variables): - for ax_idx, ax_ord in enumerate(required_bc_ord[variable]): - for loc in relative_bc_location[ax_ord]: - indexes = get_boundary_ind(tensor_shape, ax_idx, rel_loc=loc) - # print(indexes) - coords = np.array([grids[idx][indexes] for idx in np.arange(len(tensor_shape))]).T - if coords.ndim > 2: - coords = coords.squeeze() - - if vals is None: - bc_values = tensor_cache.get(val_keys[variable])[indexes] - else: - bc_values = vals[indexes] - - bc_values = np.expand_dims(bc_values, axis=0).T - coords = torch.from_numpy(coords).float() # torch.FloatTensor - - bc_values = torch.from_numpy(bc_values).float() # torch.FloatTensor - operator = BOPElement(axis=ax_idx, key=variable, coeff=1, term=[None], - power=1, var=var_idx, rel_location=loc) - operator.set_grid(grid=coords) - operator.values = bc_values - bconds.append(operator) # )(rel_location=loc) - self.conditions = bconds - print('cond[0]', [cond[0].shape for cond in self.conditions]) - print('cond[2]', [cond[2].shape for cond in self.conditions]) - - - -class BOPElement(object): - def __init__(self, axis: int, key: str, coeff: float = 1., term: list = [None], - power: Union[list, int] = 1, var: Union[list, int] = 1, rel_location: float = 0.): - self.axis = axis - self.key = key - self.coefficient = coeff - self.term = term - self.power = power - self.variables = var - self.location = rel_location - self.grid = None - - self.status = {'boundary_location_set': False, - 'boundary_values_set': False} - - def set_grid(self, grid: torch.Tensor): - self.grid = grid - self.status['boundary_location_set'] = True - - @property - def operator_form(self): - form = { - 'coeff': self.coefficient, - self.key: self.term, - 'pow': self.power, - 'var': self.variables - } - return self.key, form - - @property - def values(self): - if isinstance(self._values, FunctionType): - assert self.grid_set, 'Tring to evaluate variable coefficent without a proper grid.' 
- res = self._values(self.grids) - assert res.shape == self.grids[0].shape - return torch.from_numpy(res) - else: - return self._values - - @values.setter - def values(self, vals): - if isinstance(vals, (FunctionType, int, float, torch.Tensor)): - self._values = vals - self.vals_set = True - elif isinstance(vals, np.ndarray): - self._values = torch.from_numpy(vals) - self.vals_set = True - else: - raise TypeError( - f'Incorrect type of coefficients. Must be a type from list {VAL_TYPES}.') - - def __call__(self, values: VAL_TYPES = None): # , boundary: list = None , self.grid - if not self.vals_set and values is not None: - self.values = values - self.status['boundary_values_set'] = True - elif not self.vals_set and values is None: - raise ValueError('No location passed into the BOP.') - if self.grid is not None: - boundary = self.grid - elif self.grid is None and self.location is not None: - _, all_grids = global_var.grid_cache.get_all() # str(self.axis) - - abs_loc = self.location * all_grids[0].shape[self.axis] - if all_grids[0].ndim > 1: - boundary = np.array(all_grids[:self.axis] + all_grids[self.axis+1:]) - if isinstance(values, FunctionType): - raise NotImplementedError # TODO: evaluation of BCs passed as functions or lambdas - boundary = torch.from_numpy(np.expand_dims(boundary, axis=self.axis)).float() # .reshape(bnd_shape)) - - boundary = torch.cartesian_prod(boundary, - torch.from_numpy(np.array([abs_loc,], dtype=np.float64))).float() - boundary = torch.moveaxis(boundary, source=0, destination=self.axis).resize() - else: - boundary = torch.from_numpy(np.array([[abs_loc,],])).float() # TODO: work from here - print('boundary.shape', boundary.shape, boundary.ndim) - - elif boundary is None and self.location is None: - raise ValueError('No location passed into the BOP.') - - form = self.operator_form - boundary_operator = {form[0]: form[1]} - - boundary_value = self.values - - return [boundary, boundary_operator, boundary_value, self.variables, 'operator'] - - -class BoundaryConditions(object): - def __init__(self, grids=None, partial_operators: dict = []): - self.grids_set = (grids is not None) - if grids is not None: - self.grids = grids - self.operators = partial_operators - - def form_operator(self): - return [list(bcond()) for bcond in self.operators.values()] - - -def solver_formed_grid(training_grid=None): - if training_grid is None: - keys, training_grid = global_var.grid_cache.get_all() - else: - keys, _ = global_var.grid_cache.get_all() - - assert len(keys) == training_grid[0].ndim, 'Mismatching dimensionalities' - - training_grid = np.array(training_grid).reshape((len(training_grid), -1)) - #return torch.from_numpy(training_grid).T.type(torch.FloatTensor) - return torch.from_numpy(training_grid).T.float() - - -class SystemSolverInterface(object): - def __init__(self, system_to_adapt: SoEq, coeff_tol: float = 1.e-9): - self.variables = list(system_to_adapt.vars_to_describe) - self.adaptee = system_to_adapt - self.grids = None - self.coeff_tol = coeff_tol - - @staticmethod - def _term_solver_form(term, grids, default_domain, variables: list = ['u',]): - deriv_orders = [] - deriv_powers = [] - deriv_vars = [] - derivs_detected = False - - try: - coeff_tensor = np.ones_like(grids[0]) - - except KeyError: - raise NotImplementedError('No cache implemented') - for factor in term.structure: - if factor.is_deriv: - for param_idx, param_descr in factor.params_description.items(): - if param_descr['name'] == 'power': - power_param_idx = param_idx - 
deriv_orders.append(factor.deriv_code) - deriv_powers.append(factor.params[power_param_idx]) - try: - cur_deriv_var = variables.index(factor.variable) - except ValueError: - raise ValueError( - f'Variable family of passed derivative {variables}, other than {cur_deriv_var}') - derivs_detected = True - - deriv_vars.append(cur_deriv_var) - else: - grid_arg = None if default_domain else grids - coeff_tensor = coeff_tensor * factor.evaluate(grids=grid_arg) - if not derivs_detected: - deriv_powers = [0] - deriv_orders = [[None,],] - if len(deriv_powers) == 1: - deriv_powers = deriv_powers[0] - deriv_orders = deriv_orders[0] - - coeff_tensor = torch.from_numpy(coeff_tensor) - - if deriv_vars == []: - if deriv_powers != 0: - # print() - raise Exception('Something went wrong with parsing an equation for solver') - else: - deriv_vars = [0] - res = {'coeff': coeff_tensor, - 'term': deriv_orders, - 'pow': deriv_powers, - 'var': deriv_vars} - - return res - - @singledispatchmethod - def set_boundary_operator(self, operator_info): - raise NotImplementedError() - - def _equation_solver_form(self, equation, variables, grids=None, mode = 'NN'): - assert mode in ['NN', 'autograd', 'mat'], 'Incorrect mode passed. Form available only \ - for "NN", "autograd "and "mat" methods' - - def adjust_shape(tensor, mode = 'NN'): - if mode in ['NN', 'autograd']: - return torch.flatten(tensor).unsqueeze(1).type(torch.FloatTensor) - elif mode == 'mat': - return tensor.type(torch.FloatTensor) - - _solver_form = {} - if grids is None: - grids = self.grids - default_domain = True - else: - default_domain = False - for term_idx, term in enumerate(equation.structure): - if term_idx != equation.target_idx: - if term_idx < equation.target_idx: - weight = equation.weights_final[term_idx] - else: - weight = equation.weights_final[term_idx-1] - if not np.isclose(weight, 0, rtol = self.coeff_tol): - _solver_form[term.name] = self._term_solver_form(term, grids, default_domain, variables) - _solver_form[term.name]['coeff'] = _solver_form[term.name]['coeff'] * weight - _solver_form[term.name]['coeff'] = adjust_shape(_solver_form[term.name]['coeff'], mode = mode) - #torch.flatten(_solver_form[term.name]['coeff']).unsqueeze(1).type(torch.FloatTensor) - - free_coeff_weight = torch.from_numpy(np.full_like(a=grids[0], - fill_value=equation.weights_final[-1])) - free_coeff_weight = adjust_shape(free_coeff_weight, mode = mode) - #torch.flatten(free_coeff_weight).unsqueeze(1).type(torch.FloatTensor) - free_coeff_term = {'coeff': free_coeff_weight, - 'term': [None], - 'pow': 0, - 'var': [0,]} - _solver_form['C'] = free_coeff_term - - target_weight = torch.from_numpy(np.full_like(a=grids[0], fill_value=-1.)) - target_form = self._term_solver_form(equation.structure[equation.target_idx], grids, default_domain, variables) - target_form['coeff'] = target_form['coeff'] * target_weight - # if mode in ['NN', 'autograd']: - # target_form['coeff'] = torch.flatten(target_form['coeff']).unsqueeze(1).type(torch.FloatTensor) - # elif mode == 'mat': - # target_form['coeff'] = target_form['coeff'].type(torch.FloatTensor) - target_form['coeff'] = adjust_shape(target_form['coeff'], mode = mode) - print(f'target_form shape is {target_form["coeff"].shape}') - - _solver_form[equation.structure[equation.target_idx].name] = target_form - - return _solver_form - - def use_grids(self, grids=None): - if grids is None and self.grids is None: - _, self.grids = global_var.grid_cache.get_all() - elif grids is not None: # elif self.grids is None: - if len(grids) != 
len(global_var.grid_cache.get_all()[1]): - raise ValueError( - 'Number of passed grids does not match the problem') - self.grids = grids - - - def form(self, grids=None, mode = 'NN'): - self.use_grids(grids=grids) - equation_forms = [] - - for equation in self.adaptee.vals: - equation_forms.append((equation.main_var_to_explain, - self._equation_solver_form(equation, variables=self.variables, - grids=grids, mode = mode))) - return equation_forms - - -class SolverAdapter(object): - def __init__(self, model=None, use_cache: bool = True, var_number: int = 1): - dim_number = global_var.grid_cache.get('0').ndim - print(f'dimensionality is {dim_number}') - if model is None: - if dim_number == 1: - model = FourierNN([400, 400, 400, 400, var_number], [15], [7]) - else: - model = FourierNN([112, 112, 112, 112, var_number], [None,] + [None,]*(dim_number - 1), - [None,] + [None,]*(dim_number - 1)) # 15, 3 - - self.model = model - - self._solver_params = dict() - _solver_params = BASE_SOLVER_PARAMS - - self.set_solver_params(**_solver_params) - self.use_cache = use_cache - self.prev_solution = None - - def set_solver_params(self, lambda_bound=None, verbose: bool = None, gamma: float = None, - lr_decay: int = 400, derivative_points: int = None, learning_rate: float = None, - eps: float = None, tmin: int = None, tmax: int = None, - use_cache: bool = None, cache_verbose: bool = None, - patience: int = None, loss_oscillation_window : int = None, - no_improvement_patience: int = None, - save_always: bool = None, print_every: bool = 5000, optimizer_mode = None, - model_randomize_parameter: bool = None, step_plot_print: bool = None, - step_plot_save: bool = True, image_save_dir: str = None, tol: float = None): - - params = {'lambda_bound': lambda_bound, 'verbose': verbose, 'gamma': gamma, - 'lr_decay': lr_decay, 'derivative_points': derivative_points, - 'learning_rate': learning_rate, 'eps': eps, 'tmin': tmin, - 'tmax': tmax, 'use_cache': use_cache, 'cache_verbose': cache_verbose, - 'patience' : patience, 'loss_oscillation_window' : loss_oscillation_window, - 'no_improvement_patience' : no_improvement_patience, 'save_always': save_always, - 'print_every': print_every, 'optimizer_mode': optimizer_mode, - 'model_randomize_parameter': model_randomize_parameter, 'step_plot_print': step_plot_print, - 'step_plot_save': step_plot_save, 'image_save_dir': image_save_dir, 'tol': tol} - - for param_key, param_vals in params.items(): - if param_vals is not None: - try: - self._solver_params[param_key] = param_vals - except KeyError: - print(f'Parameter {param_key} can not be passed into the solver.') - - def set_param(self, param_key: str, value): - self._solver_params[param_key] = value - - def solve_epde_system(self, system: SoEq, grids: list=None, boundary_conditions=None, - mode='NN', data=None): - system_interface = SystemSolverInterface(system_to_adapt=system) - - system_solver_forms = system_interface.form(grids = grids, mode = mode) - if boundary_conditions is None: - op_gen = PregenBOperator(system=system, - system_of_equation_solver_form=[sf_labeled[1] for sf_labeled - in system_solver_forms]) - op_gen.generate_default_bc(vals = data, grids = grids) - boundary_conditions = op_gen.conditions - - if grids is None: - _, grids = global_var.grid_cache.get_all() - - return self.solve(system_form=[form[1] for form in system_solver_forms], grid=grids, - boundary_conditions=boundary_conditions, mode = mode) - - @staticmethod - def convert_grid(grid, mode): - assert isinstance(grid, (list, tuple)), 'Convertion of the 
tensor can be only held from tuple or list.' - - conv_grid = grid_format_prepare([np.unique(var_grid) for var_grid in grid], mode) - return conv_grid - - def solve(self, system_form=None, grid=None, boundary_conditions=None, mode = 'NN'): - if isinstance(grid, (list, tuple)): - grid = self.convert_grid(grid, mode) - print('Grid is ', type(grid), grid.shape) - self.equation = SolverEquation(grid, system_form, boundary_conditions).set_mode(mode) # set h < 0.001 - - print(grid[0].shape) - if mode == 'mat': - self.model = mat_model(grid, self.equation) - self.prev_solution = solver.Solver(grid, self.equation, self.model, mode).solve(**self._solver_params) - return self.prev_solution diff --git a/epde/interface/token_family.py b/epde/interface/token_family.py index c9b0317..16c8583 100644 --- a/epde/interface/token_family.py +++ b/epde/interface/token_family.py @@ -8,53 +8,20 @@ import numpy as np import itertools -from typing import Union, Callable -from collections import Iterable +from typing import Union, Callable, List +try: + from collections.abc import Iterable +except ImportError: + from collections import Iterable import epde.globals as global_var -from epde.structure.factor import Factor +from epde.structure.factor import Factor, EvaluatorContained def constancy_hard_equality(tensor, epsilon=1e-7): return np.abs(np.max(tensor) - np.min(tensor)) < epsilon -class EvaluatorContained(object): - """ - Class for evaluator of token (factor of the term in the sought equation) values with arbitrary function - - Attributes: - _evaluator (`callable`): a function, which returns the vector of token values, evaluated on the studied area; - params (`dict`): dictionary, containing parameters of the evaluator (like grid, on which the function is evaluated or matrices of pre-calculated function) - - Methods: - set_params(**params) - set the parameters of the evaluator, using keyword arguments - apply(token, token_params) - apply the defined evaluator to evaluate the token with specific parameters - """ - - def __init__(self, eval_function, eval_kwargs_keys={}): - self._evaluator = eval_function - self.eval_kwargs_keys = eval_kwargs_keys - - def apply(self, token, structural=False, grids=None, **kwargs): - """ - Apply the defined evaluator to evaluate the token with specific parameters. - - Args: - token (`epde.main_structures.factor.Factor`): symbolic label of the specific token, e.g. 'cos'; - token_params (`dict`): dictionary with keys, naming the token parameters (such as frequency, axis and power for trigonometric function) - and values - specific values of corresponding parameters. - - Raises: - `TypeError` - If the evaluator could not be applied to the token. 
- """ - assert list(kwargs.keys()) == self.eval_kwargs_keys - return self._evaluator(token, structural, grids, **kwargs) - - class TokenFamily(object): """ Class for the type (family) of tokens, from which the tokens are taken as factors in the terms of the equation @@ -91,7 +58,7 @@ class TokenFamily(object): set_params(tokens, token_params) Method to set the list of tokens, present in the family, and their parameters; - set_evaluator(eval_function, **eval_params) + set_evaluator(eval_function) Method to set the evaluator for the token family & its parameters; test_evaluator() @@ -182,13 +149,13 @@ def set_params(self, tokens, token_params, equality_ranges, derivs_solver_orders self.params_set = True self.equality_ranges = equality_ranges - if self.family_of_derivs: - print(f'self.tokens is {self.tokens}') - print(f'Here, derivs order is {self.derivs_ords}') + # if self.family_of_derivs: + # print(f'self.tokens is {self.tokens}') + # print(f'Here, derivs order is {self.derivs_ords}') if self.evaluator_set: self.test_evaluator() - def set_evaluator(self, eval_function, eval_kwargs_keys=[], suppress_eval_test=True): + def set_evaluator(self, eval_function, suppress_eval_test=True): """ Define the evaluator for the token family and its parameters @@ -231,14 +198,14 @@ def set_evaluator(self, eval_function, eval_kwargs_keys=[], suppress_eval_test=T >>> return value >>> >>> der_eval_params = {'token_matrices':simple_functions, 'params_names':['power'], 'params_equality':{'power' : 0}} - >>> trig_eval_params = {'grid':grid, 'params_names':['power', 'freq', 'dim'], 'params_equality':{'power': 0, 'freq':0.05, 'dim':0}} - >>> trigonometric_tokens.set_evaluator(trigonometric_evaluator, **trig_eval_params) + # >>> trig_eval_params = {'grid':grid, 'params_names':['power', 'freq', 'dim'], 'params_equality':{'power': 0, 'freq':0.05, 'dim':0}} + >>> trigonometric_tokens.set_evaluator(trigonometric_evaluator,) """ if isinstance(eval_function, EvaluatorContained): self._evaluator = eval_function else: - self._evaluator = EvaluatorContained(eval_function, eval_kwargs_keys) + self._evaluator = EvaluatorContained(eval_function) self.evaluator_set = True if self.params_set and not suppress_eval_test: self.test_evaluator() @@ -325,7 +292,7 @@ def evaluate(self, token): raise TypeError( 'Evaluator function or its parameters not set before evaluator application.') - def create(self, label=None, token_status: dict = None, + def create(self, label=None, token_status: dict = None, all_vars: List[str] = None, create_derivs: bool = False, **factor_params): """ Method for creating element of the token family @@ -361,7 +328,7 @@ def create(self, label=None, token_status: dict = None, else: factor_deriv_code = None new_factor = Factor(token_name=label, deriv_code=factor_deriv_code, status=self.status, - family_type=self.ftype, variable = self.variable, + family_type=self.ftype, variable = self.variable, all_vars = all_vars, latex_constructor = self.latex_constructor) if self.status['unique_token_type']: @@ -399,7 +366,7 @@ def cardinality(self, token_status: Union[dict, None] = None): for label in self.tokens} return len([token for token in self.tokens if token_status[token][0] < token_status[token][1]]) - def evaluate_all(self): + def evaluate_all(self, all_vars: List[str]): """ Apply method of evaluation for all tokens in token family """ @@ -416,14 +383,14 @@ def evaluate_all(self): for params_selection in params_sets: params_sets_labeled = dict(zip(list(self.token_params.keys()), params_selection)) - _, 
generated_token = self.create(token_label, **params_sets_labeled) + _, generated_token = self.create(token_label, all_vars=all_vars, **params_sets_labeled) generated_token.use_cache() if self.status['requires_grid']: generated_token.use_grids_cache() generated_token.scaled = False _ = generated_token.evaluate() print(generated_token.cache_label) - if generated_token.cache_label not in global_var.tensor_cache.memory_default.keys(): + if generated_token.cache_label not in global_var.tensor_cache.memory_default['numpy'].keys(): raise KeyError('Generated token somehow was not stored in cache.') @@ -524,6 +491,8 @@ def create(self, label=None, create_meaningful: bool = False, token_status=None, p=probabilities).create(label=None, token_status=token_status, create_derivs=create_derivs, + all_vars = [family.variable for family in + self.families_demand_equation], **kwargs) else: probabilities = (self.families_cardinality(False, token_status) / @@ -532,6 +501,8 @@ def create(self, label=None, create_meaningful: bool = False, token_status=None, p=probabilities).create(label=None, token_status=token_status, create_derivs=create_derivs, + all_vars = [family.variable for family in + self.families_demand_equation], **kwargs) else: token_families = [family for family in self.families if label in family.tokens] @@ -543,8 +514,8 @@ def create(self, label=None, create_meaningful: bool = False, token_status=None, raise Exception( 'Desired label does not match tokens in any family.') else: - return token_families[0].create(label=label, - token_status=token_status, + return token_families[0].create(label=label, token_status=token_status, + all_vars = [family.variable for family in self.families_demand_equation], **kwargs) def create_from_family(self, family_label: str, token_status=None, **kwargs): @@ -560,7 +531,9 @@ def create_from_family(self, family_label: str, token_status=None, **kwargs): """ # print([f.ftype for f in self.families], family_label) family = [f for f in self.families if family_label == f.ftype][0] - return family.create(label=None, token_status=token_status, **kwargs) + return family.create(label=None, token_status=token_status, + all_vars = [family.variable for family in self.families_demand_equation], + **kwargs) def create_with_var(self, variable: str, token_status=None, **kwargs): """ @@ -581,7 +554,9 @@ def create_with_var(self, variable: str, token_status=None, **kwargs): try: probabilities = np.array([len(f.tokens) for f in families]) family = np.random.choice(families, p = probabilities/probabilities.sum()) - return family.create(label=None, token_status=token_status, **kwargs) + return family.create(label=None, token_status=token_status, + all_vars = [family.variable for family in self.families_demand_equation], + **kwargs) except ValueError: families.remove(family) diff --git a/epde/loader.py b/epde/loader.py index bd81caf..5c701a6 100644 --- a/epde/loader.py +++ b/epde/loader.py @@ -8,10 +8,16 @@ import sys import os + +import tempfile import dill as pickle import time -from collections import Iterable +from typing import Union +try: + from collections.abc import Iterable +except ImportError: + from collections import Iterable from epde.structure.factor import Factor from epde.structure.main_structures import SoEq, Equation, Term @@ -32,7 +38,7 @@ 'ParetoLevels' : (['levels'], ['population']), 'Population' : ([], [])} -LOADING_PRESETS = {'SoEq' : {'SoEq' : []}} +# LOADING_PRESETS = {'SoEq' : {'SoEq' : []}} def get_size(obj, seen=None): size = sys.getsizeof(obj) @@ -168,6 
+174,17 @@ def attrs_from_dict(obj, attributes, except_attrs: dict = {}):
         obj.manual_reconst(man_attr, attributes[man_attr]['elements'], except_attrs)
 
+
+def temp_pickle_save(obj : Union[SoEq, Cache, TFPool, ParetoLevels, Population],
+                     not_to_pickle = [], manual_pickle = []):
+    loader = EPDELoader()
+    pickled_obj = loader.saves(obj, not_to_pickle = not_to_pickle, manual_pickle = manual_pickle) # pass the caller's lists through instead of hardcoded empty ones
+
+    temp_file = tempfile.NamedTemporaryFile()
+    temp_file.write(pickled_obj)
+    return temp_file
+
+
 class LoaderAssistant(object):
     def __init__(self):
         pass
@@ -176,14 +193,14 @@ def __init__(self):
     @staticmethod
     def system_preset(pool: TFPool):
         # Validate correctness of attribute definitions
         return {'SoEq' : {'tokens_for_eq' : TFPool(pool.families_demand_equation),
                           'tokens_supp' : TFPool(pool.families_equationless),
-                          'latex_form' : None}, # TBD, make better loading procedure
+                          'latex_form' : None},
                 'Equation' : {'pool' : pool,
                               'latex_form' : None,
                               '_history' : None},
                 'Term' : {'pool' : pool,
                           'latex_form' : None},
-                'Factor' : {'_latex_constructor' : None}} # 'latex_form' : None,
-
+                'Factor' : {'_latex_constructor' : None,
+                            '_evaluator' : None}}
 
     @staticmethod
     def pool_preset():
@@ -194,20 +211,32 @@ def cache_preset():
         return {}
 
     @staticmethod
-    def population_preset():
-        return {}
+    def population_preset(pool: TFPool):
+        return {'Population' : {},
+                'SoEq' : {'tokens_for_eq' : TFPool(pool.families_demand_equation),
+                          'tokens_supp' : TFPool(pool.families_equationless),
+                          'latex_form' : None},
+                'Equation' : {'pool' : pool,
+                              'latex_form' : None,
+                              '_history' : None},
+                'Term' : {'pool' : pool,
+                          'latex_form' : None},
+                'Factor' : {'_latex_constructor' : None,
+                            '_evaluator' : None}}
 
     @staticmethod
     def pareto_levels_preset(pool: TFPool):
-        return {
-            'SoEq' : {'tokens_for_eq' : TFPool(pool.families_demand_equation),
+        return {'ParetoLevels' : {},
+                'SoEq' : {'tokens_for_eq' : TFPool(pool.families_demand_equation),
                           'tokens_supp' : TFPool(pool.families_equationless),
-                      'latex_form' : None}, # TBD, make better loading procedure
-            'Equation' : {'pool' : pool, 'latex_form' : None},
+                'Equation' : {'pool' : pool,
+                              'latex_form' : None,
+                              '_history' : None},
                 'Term' : {'pool' : pool,
                           'latex_form' : None},
-            'Factor' : {'_latex_constructor' : None}}
+                'Factor' : {'_latex_constructor' : None,
+                            '_evaluator' : None}}
 
 
 class EPDELoader(object):
@@ -252,4 +281,4 @@ def load(self, filename:str, **kwargs):
     def loads(self, byteobj, **kwargs):
         obj_pickled = pickle.loads(byteobj)
-        return self.use_pickles(obj_pickled, **kwargs)
+        return self.use_pickles(obj_pickled, **kwargs)
\ No newline at end of file
diff --git a/epde/operators/common/coeff_calculation.py b/epde/operators/common/coeff_calculation.py
index 58144dd..7feacd6 100644
--- a/epde/operators/common/coeff_calculation.py
+++ b/epde/operators/common/coeff_calculation.py
@@ -86,4 +86,4 @@ def apply(self, objective : Equation, arguments : dict = None):
         objective.weights_final = weights
 
     def use_default_tags(self):
-        self._tags = {'coefficient calculation', 'chromosome level', 'no suboperators', 'inplace'}
+        self._tags = {'coefficient calculation', 'gene level', 'no suboperators', 'inplace'}
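Editorial aside: a minimal sketch of how the new temp_pickle_save helper is expected to round-trip an object through a temporary file, assuming only the EPDELoader.saves/loads methods shown in this diff; eq_system is a hypothetical, previously discovered SoEq object.

from epde.loader import EPDELoader, temp_pickle_save

tmp = temp_pickle_save(eq_system)          # NamedTemporaryFile holding the pickled bytes
tmp.seek(0)                                # rewind before reading the payload back
restored = EPDELoader().loads(tmp.read())  # reconstruct the object from the bytes
tmp.close()                                # the temporary file is removed on close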
diff --git a/epde/operators/common/fitness.py b/epde/operators/common/fitness.py
index d025222..76bfbc9 100644
--- a/epde/operators/common/fitness.py
+++ b/epde/operators/common/fitness.py
@@ -8,14 +8,19 @@
 import numpy as np
 from copy import deepcopy
+import torch
 
 import matplotlib.pyplot as plt
 from matplotlib import cm
 
-from epde.interface.solver_integration import SolverAdapter
+from epde.integrate import SolverAdapter
 from epde.structure.main_structures import SoEq, Equation
 from epde.operators.utils.template import CompoundOperator
 import epde.globals as global_var
 
+from sklearn.linear_model import LinearRegression
+from scipy.optimize import minimize
+
+LOSS_NAN_VAL = 1e7
 
 class L2Fitness(CompoundOperator):
     """
@@ -68,7 +73,7 @@ def apply(self, objective: Equation, arguments: dict):
         if features is None:
             discr_feats = 0
         else:
-            discr_feats = np.dot(features, objective.weights_final[:-1][objective.weights_internal != 0]) # weights_final -> weights_internal
+            discr_feats = np.dot(features, objective.weights_final[:-1][objective.weights_internal != 0])
 
         discr = (discr_feats + np.full(target.shape, objective.weights_final[-1]) - target)
         self.g_fun_vals = global_var.grid_cache.g_func.reshape(-1)
@@ -92,55 +97,221 @@ def use_default_tags(self):
 
 class SolverBasedFitness(CompoundOperator):
+    # To be modified to include physics-informed information criterion (PIC)
+
     key = 'SolverBasedFitness'
 
-    def __init__(self, param_keys: list, solver_kwargs: dict = {'model' : None, 'use_cache' : True}):
+    def __init__(self, param_keys: list):
         super().__init__(param_keys)
-        solver_kwargs['dim'] = len(global_var.grid_cache.get_all()[1])
-
-        self.adapter = None # SolverAdapter(var_number = len(system.vars_to_describe))
+        self.adapter = None
+
+    def set_adapter(self, net = None):
 
-    def set_adapter(self, var_number):
-        if self.adapter is not None:
-            self.adapter = SolverAdapter(var_number = var_number)
+        if self.adapter is None or net is not None:
+            compiling_params = {'mode': 'autograd', 'tol':0.01, 'lambda_bound': 100} # 'h': 1e-1
+            optimizer_params = {}
+            training_params = {'epochs': 4e3, 'info_string_every' : 1e3}
+            early_stopping_params = {'patience': 4, 'no_improvement_patience' : 250}
+
+            explicit_cpu = True
+            device = 'cuda' if (torch.cuda.is_available() and not explicit_cpu) else 'cpu' # is_available is a function and has to be called, otherwise it is always truthy
+
+            self.adapter = SolverAdapter(net = net, use_cache = False, device=device)
+
+            self.adapter.set_compiling_params(**compiling_params)
+            self.adapter.set_optimizer_params(**optimizer_params)
+            self.adapter.set_early_stopping_params(**early_stopping_params)
+            self.adapter.set_training_params(**training_params)
 
     def apply(self, objective : SoEq, arguments : dict):
-        self.set_adapter(len(objective.vars_to_describe))
         self_args, subop_args = self.parse_suboperator_args(arguments = arguments)
 
+        try:
+            net = deepcopy(global_var.solution_guess_nn)
+        except NameError:
+            net = None
+
+        self.set_adapter(net=net)
+
         self.suboperators['sparsity'].apply(objective, subop_args['sparsity'])
         self.suboperators['coeff_calc'].apply(objective, subop_args['coeff_calc'])
 
-        # _, target, features = objective.evaluate(normalize = False, return_val = False)
+        print('solving equation:')
+        print(objective.text_form)
 
-        grid = global_var.grid_cache.get_all()[1]
-        solution_model = self.adapter.solve_epde_system(system = objective, grids = grid,
-                                                        boundary_conditions = None)
-
-        self.g_fun_vals = global_var.grid_cache.g_func #.reshape(-1)
+        loss_add, solution_nn = self.adapter.solve_epde_system(system = objective, grids = None,
+                                                               boundary_conditions = None, use_fourier=True)
+        _, grids = global_var.grid_cache.get_all(mode = 'torch')
 
-        solution = solution_model(self.adapter.convert_grid(grid)).detach().numpy()
-        for eq_idx, eq in enumerate(objective.structure):
-            referential_data = global_var.tensor_cache.get((eq.main_var_to_explain, (1.0,)))
-
-            discr = (solution[eq_idx, ...] -
-                     referential_data.reshape(solution[eq_idx, ...].shape))
+        grids = torch.stack([grid.reshape(-1) for grid in grids], dim = 1).float()
+        solution = solution_nn(grids).detach().cpu().numpy()
+        self.g_fun_vals = global_var.grid_cache.g_func
+
+        for eq_idx, eq in enumerate(objective.vals):
+            if torch.isnan(loss_add):
+                fitness_value = 2*LOSS_NAN_VAL
+            else:
+                referential_data = global_var.tensor_cache.get((eq.main_var_to_explain, (1.0,)))
-            discr = np.multiply(discr, self.g_fun_vals.reshape(discr.shape))
-            rl_error = np.linalg.norm(discr, ord = 2)
-
-            fitness_value = rl_error
-            if np.sum(eq.weights_final) == 0:
-                fitness_value /= self.params['penalty_coeff']
+                print(f'solution shape {solution.shape}')
+                print(f'solution[..., eq_idx] {solution[..., eq_idx].shape}, eq_idx {eq_idx}')
+                discr = (solution[..., eq_idx] - referential_data.reshape(solution[..., eq_idx].shape))
+                discr = np.multiply(discr, self.g_fun_vals.reshape(discr.shape))
+                rl_error = np.linalg.norm(discr, ord = 2)
+
+                print(f'fitness error is {rl_error}, while loss addition is {float(loss_add)}')
+                fitness_value = rl_error + self.params['pinn_loss_mult'] * float(loss_add) # TODO: make pinn_loss_mult case dependent
+                if np.sum(eq.weights_final) == 0:
+                    fitness_value /= self.params['penalty_coeff']
 
             eq.fitness_calculated = True
             eq.fitness_value = fitness_value
 
-            if global_var.verbose.plot_DE_solutions:
-                plot_data_vs_solution(self.adapter.convert_grid(grid),
-                                      data = referential_data.reshape(solution[eq_idx, ...].shape),
-                                      solution = solution[eq_idx, ...])
+    def use_default_tags(self):
+        self._tags = {'fitness evaluation', 'chromosome level', 'contains suboperators', 'inplace'}
+
+
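Editorial aside: the fitness assembled above reduces to a weighted sum of the solver discrepancy and the PINN training loss. A minimal numeric sketch of that composition; the arrays and scalar values here are illustrative stand-ins, only the parameter names mirror the JSON defaults added later in this diff.

import numpy as np

params = {'pinn_loss_mult': 1e4, 'penalty_coeff': 0.2}  # mirrors the new JSON defaults

solution = np.linspace(0., 1., 100)   # PINN output sampled on the grid (illustrative)
reference = solution + 0.01           # tensor-cache data for the explained variable
g_vals = np.ones(100)                 # domain-weighting function g
loss_add = 0.35                       # final PINN training loss from the adapter

discr = (solution - reference) * g_vals
fitness_value = np.linalg.norm(discr, ord=2) + params['pinn_loss_mult'] * loss_add

weights_sum = 1.3                     # np.sum(eq.weights_final) for a non-degenerate equation
if weights_sum == 0:                  # equations degenerated to all-zero weights are penalized
    fitness_value /= params['penalty_coeff']
print(fitness_value)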
+class PIC(CompoundOperator):
+
+    key = 'PIC'
+
+    def __init__(self, param_keys: list):
+        super().__init__(param_keys)
+        self.adapter = None
+        self.window_size = None
+
+    def set_adapter(self, net=None):
+
+        if self.adapter is None or net is not None:
+            compiling_params = {'mode': 'autograd', 'tol': 0.01, 'lambda_bound': 100} # 'h': 1e-1
+            optimizer_params = {}
+            training_params = {'epochs': 4e3, 'info_string_every': 1e3}
+            early_stopping_params = {'patience': 4, 'no_improvement_patience': 250}
+
+            explicit_cpu = False
+            device = 'cuda' if (torch.cuda.is_available() and not explicit_cpu) else 'cpu' # is_available has to be called here as well
+
+            self.adapter = SolverAdapter(net=net, use_cache=False, device=device)
+
+            self.adapter.set_compiling_params(**compiling_params)
+            self.adapter.set_optimizer_params(**optimizer_params)
+            self.adapter.set_early_stopping_params(**early_stopping_params)
+            self.adapter.set_training_params(**training_params)
+
+    def apply(self, objective: SoEq, arguments: dict):
+        self_args, subop_args = self.parse_suboperator_args(arguments=arguments)
+
+        try:
+            net = deepcopy(global_var.solution_guess_nn)
+        except NameError:
+            net = None
+
+        self.set_adapter(net=net)
+
+        self.suboperators['sparsity'].apply(objective, subop_args['sparsity'])
+        self.suboperators['coeff_calc'].apply(objective, subop_args['coeff_calc'])
+
+        print('solving equation:')
+        print(objective.text_form)
+
+        loss_add, solution_nn = self.adapter.solve_epde_system(system=objective, grids=None,
+                                                               boundary_conditions=None, use_fourier=True)
+
+        _, grids = global_var.grid_cache.get_all(mode='torch')
+
+        grids = torch.stack([grid.reshape(-1) for grid in grids], dim=1).float()
+        solution = solution_nn(grids).detach().cpu().numpy()
+        self.g_fun_vals = global_var.grid_cache.g_func
+
+        for eq_idx, eq in enumerate(objective.vals):
+            # Calculate r-loss
+            target = eq.structure[eq.target_idx]
+            target_vals = 
target.evaluate(False) + features_vals = [] + nonzero_features_indexes = [] + + for i in range(len(eq.structure)): + if i == eq.target_idx: + continue + idx = i if i < eq.target_idx else i - 1 + if eq.weights_internal[idx] != 0: + features_vals.append(eq.structure[i].evaluate(False)) + nonzero_features_indexes.append(idx) + + if len(features_vals) == 0: + objective.weights_final = np.zeros(len(objective.structure)) + else: + features = features_vals[0] + if len(features_vals) > 1: + for i in range(1, len(features_vals)): + features = np.vstack([features, features_vals[i]]) + features = np.vstack([features, np.ones(features_vals[0].shape)]) # Add constant feature + features = np.transpose(features) + + self.window_size = len(target_vals) // 2 + num_horizons = len(target_vals) - self.window_size + 1 + eq_window_weights = [] + + # Compute coefficients and collect statistics over horizons + for start_idx in range(num_horizons): + end_idx = start_idx + self.window_size + + target_window = target_vals[start_idx:end_idx] + feature_window = features[start_idx:end_idx, :] + + estimator = LinearRegression(fit_intercept=False) + if feature_window.ndim == 1: + feature_window = feature_window.reshape(-1, 1) + try: + self.g_fun_vals_window = self.g_fun_vals.reshape(-1)[start_idx:end_idx] + except AttributeError: + self.g_fun_vals_window = None + estimator.fit(feature_window, target_window, sample_weight=self.g_fun_vals_window) + valuable_weights = estimator.coef_ + + window_weights = np.zeros(len(eq.structure)) + for weight_idx in range(len(window_weights)): + if weight_idx in nonzero_features_indexes: + window_weights[weight_idx] = valuable_weights[nonzero_features_indexes.index(weight_idx)] + eq_window_weights.append(window_weights) + + eq_cv = [np.abs(np.std(_) / (np.mean(_))) for _ in zip(*eq_window_weights)] # As in paper's repo + eq_cv_valuable = [x for x in eq_cv if not np.isnan(x)] + lr = np.mean(eq_cv_valuable) + + # Calculate p-loss + if torch.isnan(loss_add): + lp = 2 * LOSS_NAN_VAL + else: + print(f'solution shape {solution.shape}') + print(f'solution[..., eq_idx] {solution[..., eq_idx].shape}, eq_idx {eq_idx}') + referential_data = global_var.tensor_cache.get((eq.main_var_to_explain, (1.0,))) + initial_data = global_var.tensor_cache.get(('u', (1.0,))).reshape(solution[..., eq_idx].shape) + + sol_pinn = solution[..., eq_idx] + sol_ann = referential_data.reshape(solution[..., eq_idx].shape) + sol_pinn_normalized = (sol_pinn - min(initial_data)) / (max(initial_data) - min(initial_data)) + sol_ann_normalized = (sol_ann - min(initial_data)) / (max(initial_data) - min(initial_data)) + + discr = sol_pinn_normalized - sol_ann_normalized + # discr = (solution[..., eq_idx] - referential_data.reshape(solution[..., eq_idx].shape)) # Default + discr = np.multiply(discr, self.g_fun_vals.reshape(discr.shape)) + rl_error = np.linalg.norm(discr, ord=2) + + print(f'fitness error is {rl_error}, while loss addition is {float(loss_add)}') + lp = rl_error + self.params['pinn_loss_mult'] * float( + loss_add) # TODO: make pinn_loss_mult case dependent + if np.sum(eq.weights_final) == 0: + lp /= self.params['penalty_coeff'] + + eq.fitness_calculated = True + print('Lr: ', lr, '\t Lp: ', lp, '\t PIC: ', lr * lp) + eq.fitness_value = lr * lp + + def use_default_tags(self): + self._tags = {'fitness evaluation', 'chromosome level', 'contains suboperators', 'inplace'} + - def plot_data_vs_solution(grid, data, solution): if grid.shape[1]==2: fig = plt.figure() diff --git a/epde/operators/common/sparsity.py 
b/epde/operators/common/sparsity.py index 53f0681..4793a2b 100644 --- a/epde/operators/common/sparsity.py +++ b/epde/operators/common/sparsity.py @@ -72,3 +72,5 @@ def apply(self, objective : Equation, arguments : dict): def use_default_tags(self): self._tags = {'sparsity', 'gene level', 'no suboperators', 'inplace'} + + diff --git a/epde/operators/multiobjective/mutations.py b/epde/operators/multiobjective/mutations.py index c1d0f3a..0722ac0 100644 --- a/epde/operators/multiobjective/mutations.py +++ b/epde/operators/multiobjective/mutations.py @@ -15,7 +15,7 @@ from epde.structure.main_structures import Equation, SoEq, Term from epde.structure.structure_template import check_uniqueness -from epde.supplementary import filter_powers, try_iterable +from epde.supplementary import filter_powers from epde.operators.utils.template import CompoundOperator, add_base_param_to_operator diff --git a/epde/operators/multiobjective/variation.py b/epde/operators/multiobjective/variation.py index cb9d33c..ea7120b 100644 --- a/epde/operators/multiobjective/variation.py +++ b/epde/operators/multiobjective/variation.py @@ -230,7 +230,12 @@ def apply(self, objective : tuple, arguments : dict): if objective[1].structure[i].label == objective[0].structure[term1_token_idx].label][0] for param_idx, param_descr in objective[0].structure[term1_token_idx].params_description.items(): if param_descr['name'] == 'power': power_param_idx = param_idx - if param_descr['name'] == 'dim': dim_param_idx = param_idx + if param_descr['name'] == 'dim': dim_param_idx = param_idx + + try: # TODO: refactor logic + dim_param_idx + except: + dim_param_idx = power_param_idx for param_idx in np.arange(objective[0].structure[term1_token_idx].params.size): if param_idx != power_param_idx and param_idx != dim_param_idx: diff --git a/epde/operators/singleobjective/mutations.py b/epde/operators/singleobjective/mutations.py index 30b2b95..59a5c3e 100644 --- a/epde/operators/singleobjective/mutations.py +++ b/epde/operators/singleobjective/mutations.py @@ -15,7 +15,7 @@ from epde.structure.main_structures import Equation, SoEq, Term from epde.structure.structure_template import check_uniqueness -from epde.supplementary import filter_powers, try_iterable +from epde.supplementary import filter_powers from epde.operators.utils.template import CompoundOperator, add_base_param_to_operator diff --git a/epde/operators/utils/default_parameter_loader.py b/epde/operators/utils/default_parameter_loader.py index 24ad1da..3e39f1d 100644 --- a/epde/operators/utils/default_parameter_loader.py +++ b/epde/operators/utils/default_parameter_loader.py @@ -47,6 +47,7 @@ def __enter__(self): def __exit__(self, type, value, traceback): self._repo_path = None + # self._repo = None def _initialise_repo(self) -> dict: with open(self._repo_path) as repository_json_file: @@ -59,7 +60,6 @@ def get_default_params_for_operator(self, operator_name : str) -> dict: return self._repo[operator_name] else: raise Exception(f'Operator with key {operator_name} is missing from the repo with params') - # return {} def change_operator_param(self, operator_name : str, parameter_name : str, new_value): if type(new_value) != type(self._repo[operator_name][parameter_name]): diff --git a/epde/operators/utils/operator_mappers.py b/epde/operators/utils/operator_mappers.py index 72c1dfd..b0bf60f 100644 --- a/epde/operators/utils/operator_mappers.py +++ b/epde/operators/utils/operator_mappers.py @@ -13,6 +13,26 @@ from epde.operators.utils.template import OPERATOR_LEVELS, CompoundOperator 
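Editorial aside: the variation.py hunk above keeps the dim_param_idx fallback behind a bare try/except flagged with a TODO. A minimal sketch of the same fallback without exception handling, under illustrative assumptions (params_description is a hypothetical stand-in for the factor's parameter table, not the repository's code); the OperatorCondition class this hunk adds follows just below.

# hypothetical refactor of the dim_param_idx fallback from the variation.py hunk above
params_description = {0: {'name': 'power'}, 1: {'name': 'freq'}}  # illustrative parameters

power_param_idx, dim_param_idx = None, None
for param_idx, param_descr in params_description.items():
    if param_descr['name'] == 'power':
        power_param_idx = param_idx
    if param_descr['name'] == 'dim':
        dim_param_idx = param_idx

if dim_param_idx is None:
    # the token family has no 'dim' parameter: fall back to the 'power' index,
    # mirroring the except-branch of the diff
    dim_param_idx = power_param_idx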
+class OperatorCondition(CompoundOperator):
+    def __init__(self, operator: CompoundOperator, condition: Callable = None):
+        super().__init__()
+
+        self._conditioned_operator = operator
+        self.condition = condition
+        self._tags = operator.operator_tags
+
+    def apply(self, objective, arguments: dict):
+        if self.condition(objective):
+            if 'inplace' in self.operator_tags:
+                self._conditioned_operator.apply(objective, arguments)
+            elif 'standard' in self.operator_tags:
+                objective = self._conditioned_operator.apply(objective, arguments)
+            else:
+                raise TypeError('Incorrect type of mapping operator: not inplace nor returns similar object, as input.')
+
+        if 'population level' in self._tags: # Edited 21.05.2024
+            return objective
+
 class OperatorMapper(CompoundOperator):
     def __init__(self, operator_to_map: CompoundOperator, objective_tag: str, source_tag: str,
@@ -22,12 +42,13 @@ def __init__(self, operator_to_map: CompoundOperator, objective_tag: str, source
         self.set_suboperators({'to_map' : operator_to_map})
         self.objective_condition = objective_condition; self.element_condition = element_condition
         if not source_tag in operator_to_map.operator_tags:
-            raise ValueError(f'Only {source_tag}-level operators can be mapped to the elements of a {objective_tag}.')
+            raise ValueError(f'Only {source_tag}-level operators can be mapped to the elements of a {objective_tag}. \
+                               Received operator with tags: {operator_to_map.operator_tags}')
         self._tags = copy.copy(operator_to_map.operator_tags)
         self._tags.remove(source_tag)
         self._tags.add(objective_tag)
 
-    def apply(self, objective : CompoundOperator, arguments : dict):
+    def apply(self, objective, arguments: dict):
         if self.objective_condition is None or self.objective_condition(objective):
             if 'inplace' in self.operator_tags:
                 for elem in objective:
@@ -36,17 +57,16 @@ def apply(self, objective : CompoundOperator, arguments : dict):
             elif 'standard' in self.operator_tags:
                 for idx, elem in enumerate(objective):
                     if self.element_condition is None or self.element_condition(elem):
-                        # print(objective[idx])
                         objective[idx] = self.suboperators['to_map'].apply(elem, arguments)
             else:
                 raise TypeError('Incorrect type of mapping operator: not inplace nor returns similar object, as input.')
 
-        if 'population level' in self._tags:
-            return objective
+        if 'population level' in self._tags: # Edited 21.05.2024
+            return objective
 
-def map_operator_between_levels(operator: CompoundOperator, original_level: Union[str, int], target_level: Union[str, int],
-                                objective_condition: Callable = None, element_condition: Callable = None):
+def map_operator_between_levels(operator, original_level: Union[str, int], target_level: Union[str, int],
+                                objective_condition: Callable = None, element_condition: Callable = None) -> CompoundOperator:
     if isinstance(original_level, str): original_level = OPERATOR_LEVELS.index(original_level)
     if isinstance(target_level, str): target_level = OPERATOR_LEVELS.index(target_level)
diff --git a/epde/operators/utils/parameters/default_parameters_multi_objective.json b/epde/operators/utils/parameters/default_parameters_multi_objective.json
index cdfcaa6..9234846 100644
--- a/epde/operators/utils/parameters/default_parameters_multi_objective.json
+++ b/epde/operators/utils/parameters/default_parameters_multi_objective.json
@@ -18,6 +18,14 @@
     "DiscrepancyBasedFitness" : {
         "penalty_coeff" : 0.2
     },
+    "SolverBasedFitness" : {
+        "penalty_coeff" : 0.2,
+        "pinn_loss_mult" : 1e4
+    },
+    "PIC" : {
+        "penalty_coeff" : 0.2,
+        "pinn_loss_mult" : 1e4
+    },
     "ParetoLevelsCrossover" : 
{ }, diff --git a/epde/operators/utils/template.py b/epde/operators/utils/template.py index 2c15c50..850b723 100644 --- a/epde/operators/utils/template.py +++ b/epde/operators/utils/template.py @@ -17,22 +17,11 @@ OPERATOR_LEVELS_SUPPORTED_TYPES = {'custom level': None, 'term level': Term, 'gene level': Gene, 'chromosome level': Chromosome, 'population level': ParetoLevels} - -def add_base_param_to_operator(operator): - params_container = EvolutionaryParams() - for param_key, param_value in params_container[operator.key].items(): - operator.params[param_key] = param_value - def add_base_param_to_operator(operator, target_dict): params_container = EvolutionaryParams() for param_key, param_value in params_container.get_default_params_for_operator(operator.key).items(): operator.params[param_key] = target_dict[param_key] if param_key in target_dict.keys( ) else param_value -# def add_param_to_operator(operator, target_dict, labeled_base_val): -# for key, base_val in labeled_base_val.items(): -# if base_val is None and key not in target_dict.keys(): -# raise ValueError('Mandatory parameter for evolutionary operator') -# operator.params[key] = target_dict[key] if key in target_dict.keys() else base_val class CompoundOperator(): ''' @@ -83,7 +72,7 @@ def set_suboperators(self, operators: dict, probas: dict = {}): if not all([isinstance(key, str) and (isinstance(value, (list, tuple, dict)) or issubclass(type(value), CompoundOperator)) for key, value in operators.items()]): - print([(key, isinstance(key, str), + print([(key, isinstance(key, str), value, (isinstance(value, (list, tuple, dict)) or issubclass(type(value), CompoundOperator))) for key, value in operators.items()]) @@ -230,4 +219,4 @@ def __next__(self): self._idx += 1 return res else: - raise StopIteration + raise StopIteration \ No newline at end of file diff --git a/epde/optimizers/blocks.py b/epde/optimizers/blocks.py index 00582f9..438f901 100644 --- a/epde/optimizers/blocks.py +++ b/epde/optimizers/blocks.py @@ -90,10 +90,11 @@ class will try to take the arguments from the operator object. 
self.arg_keys = self._operator.get_suboperator_args() extra = ['self', 'population_subset', 'population'] for arg_key in extra: - try: + if arg_key in self.arg_keys: + # try: self.arg_keys.remove(arg_key) - except: - pass + # except: + # pass elif parse_operator_args == 'use operator attribute': self.arg_keys = self._operator.arg_keys elif isinstance(parse_operator_args, (list, tuple)): @@ -210,4 +211,4 @@ def traversal(self, input_obj, EA_kwargs): @property def output(self): - return self.final_vertex.output + return self.final_vertex.output \ No newline at end of file diff --git a/epde/optimizers/builder.py b/epde/optimizers/builder.py index aad05bd..983a60d 100644 --- a/epde/optimizers/builder.py +++ b/epde/optimizers/builder.py @@ -6,7 +6,7 @@ @author: maslyaev """ -from abc import ABC, abstractproperty +from abc import ABC, abstractmethod from epde.optimizers.blocks import EvolutionaryBlock, InputBlock from epde.optimizers.strategy import Strategy @@ -22,7 +22,7 @@ def link(op1, op2): op2.add_incoming(op1) -class StrategyBuilder(): # OperatorBuilder ABC +class StrategyBuilder(): """ Class instance for building a strategy @@ -55,9 +55,8 @@ def reset(self, strategy_class): self.reachable = dict() self.initial_label = None - @abstractproperty + @property def processer(self): - # raise NotImplementedError('Tring to return a property of an abstract class.') return self._processer def add_init_operator(self, operator_label : str = 'initial'): @@ -182,4 +181,4 @@ def builder(self): @builder.setter def builder(self, sector_processer_builder : StrategyBuilder): print(f'setting builder with {sector_processer_builder}') - self._builder = sector_processer_builder + self._builder = sector_processer_builder \ No newline at end of file diff --git a/epde/optimizers/moeadd/moeadd.py b/epde/optimizers/moeadd/moeadd.py index 7125e73..af7484b 100644 --- a/epde/optimizers/moeadd/moeadd.py +++ b/epde/optimizers/moeadd/moeadd.py @@ -53,7 +53,7 @@ class ParetoLevels(object): ''' def __init__(self, population, sorting_method = fast_non_dominated_sorting, - update_method = ndl_update, initial_sort : bool = False): + update_method = ndl_update, initial_sort = False): """ Args: population (`list`): List with the elements - canidate solutions of the case-specific subclass of @@ -69,7 +69,7 @@ def __init__(self, population, sorting_method = fast_non_dominated_sorting, self.unplaced_candidates = population def manual_reconst(self, attribute:str, value, except_attrs:dict): - from epde.loader import attrs_from_dict, get_typespec_attrs + from epde.loader import attrs_from_dict supported_attrs = ['population'] if attribute not in supported_attrs: raise ValueError(f'Attribute {attribute} is not supported by manual_reconst method.') @@ -139,7 +139,6 @@ def update(self, point): Returns: None """ - # print(f'IN MOEADD UPDATE {point.text_form}') self.levels = self._update_method(point, self.levels) self.population.append(point) @@ -260,7 +259,8 @@ class MOEADDOptimizer(object): """ def __init__(self, population_instruct, weights_num, pop_size, solution_params, delta, neighbors_number, - nds_method = fast_non_dominated_sorting, ndl_update = ndl_update): # logger: Logger = None, + nds_method = fast_non_dominated_sorting, ndl_update = ndl_update, + passed_population: ParetoLevels = None): # logger: Logger = None, """ Initialization of the evolutionary optimizer is done with the introduction of initial population of candidate solutions, divided into Pareto non-dominated @@ -268,24 +268,31 @@ def __init__(self, 
population_instruct, weights_num, pop_size, solution_params,
         another on the same level), and creation of set of weights with a proximity list defined for each of them.
 
-        Args:
-            pop_constructor (``):
-            weights_num (`int`): Number of the weight vectors, dividing the objective function values space. Often, shall be same, as the population size.
-            pop_size (`int`): The size of the candidate solution population.
-            solution_params (`dict`): The dicitionary with the solution parameters, passed into each new created solution during the initialization.
-            delta (`float`): parameter of uniform spacing between the weight vectors; *H = 1 / delta* should be integer - a number of divisions along an objective coordinate axis.
-            neighbors_number (`int`): number of neighboring weight vectors to be considered during the operation of evolutionary operators as the "neighbors" of the processed sectors.
-            nds_method (`callable`): default - ``moeadd.moeadd_supplementary.fast_non_dominated_sorting``
-                Method of non-dominated sorting of the candidate solutions. The default method is implemented according to the article
-                *K. Deb, A. Pratap, S. Agarwal, and T. Meyarivan, “A fast and elitist multiobjective genetic algorithm: NSGA-II,” IEEE Trans. Evol. Comput.,
-                vol. 6, no. 2, pp. 182–197, Apr. 2002.*
-            ndl_update (`callable`): default - ``moeadd.moeadd_supplementary.ndl_update``
+        Parameters
+        ----------
+        population_instruct : dict
+            Parameters of the individual creation.
+        weights_num : int
+            Number of the weight vectors, dividing the objective function values space. Often it should be the same as the population size.
+        pop_size : int
+            The size of the candidate solution population.
+        solution_params : dict
+            The dictionary with the solution parameters, passed into each newly created solution during the initialization.
+        delta : float
+            The parameter of uniform spacing between the weight vectors; *H = 1 / delta* should be integer - a number of divisions along an objective coordinate axis.
+        neighbors_number : int
+            The number of neighboring weight vectors to be considered during the operation of evolutionary operators as the "neighbors" of the processed sectors.
+        nds_method : callable, optional
+            Method of non-dominated sorting of the candidate solutions. The default method is implemented according to the article
+            *K. Deb, A. Pratap, S. Agarwal, and T. Meyarivan, “A fast and elitist multiobjective genetic algorithm: NSGA-II,” IEEE Trans. Evol. Comput.,
+            vol. 6, no. 2, pp. 182–197, Apr. 2002.* The default is ``moeadd.moeadd_supplementary.fast_non_dominated_sorting``
+        ndl_update : callable, optional
             Method of adding a new solution point into the objective functions space, introduced
             to minimize the recalculation of the non-dominated levels for the entire population.
             The default method was taken from the *K. Li, K. Deb, Q. Zhang, and S. Kwong, “Efficient non-domination level
             update approach for steady-state evolutionary multiobjective optimization,” Dept. Electr. Comput. Eng., Michigan State Univ., East Lansing,
-            MI, USA, Tech. Rep. COIN No. 2014014, 2014.*
+            MI, USA, Tech. Rep. COIN No. 2014014, 2014.* The default - ``moeadd.moeadd_supplementary.ndl_update``
         """
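Editorial aside: to make the delta parameter of the docstring above concrete, a short check that, under the standard simplex-lattice reading, delta = 1/H yields C(H + m - 1, m - 1) uniformly spaced weight vectors for m objectives. This count is a property of the lattice, not a function exported by EPDE, and the constructor below additionally asserts weights_num == pop_size.

from math import comb

delta, m = 0.25, 2                    # delta = 1/H; two objective functions
H = round(1 / delta)                  # 4 divisions along each objective axis
weights_num = comb(H + m - 1, m - 1)  # 5 vectors: (0,1), (.25,.75), (.5,.5), (.75,.25), (1,0)
print(H, weights_num)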
        assert weights_num == pop_size, 'Each individual in population has to correspond to a sector'
         self.abbreviated_search_executed = False
@@ -295,30 +302,36 @@ def __init__(self, population_instruct, weights_num, pop_size, solution_params,
         pop_constructor = SystemsPopulationConstructor(**population_instruct)
 
         assert type(solution_params) == type(None) or type(solution_params) == dict, 'The solution parameters, passed into population constructor must be in dictionary'
 
-        population = []
-        for solution_idx in range(pop_size):
-            solution_gen_idx = 0
-            while True:
-                if type(solution_params) == type(None): solution_params = {}
-                temp_solution = pop_constructor.create(**solution_params)
-                temp_solution.set_domain(solution_idx)
-                if not np.any([temp_solution == solution for solution in population]):
-                    population.append(temp_solution)
-                    print(f'New solution accepted, confirmed {len(population)}/{pop_size} solutions.')
-                    break
-                if solution_gen_idx == soluton_creation_attempts_softmax and global_var.verbose.show_warnings:
-                    print('solutions tried:', solution_gen_idx)
-                    warnings.warn('Too many failed attempts to create unique solutions for multiobjective optimization.\
-                                  Change solution parameters to allow more diversity.')
-                if solution_gen_idx == soluton_creation_attempts_hardmax:
-                    population.append(temp_solution)
-                    print(f'New solution accepted, despite being a dublicate of another solution.\
-                          Confirmed {len(population)}/{pop_size} solutions.')
-                    break
-                solution_gen_idx += 1
-        self.pareto_levels = ParetoLevels(population, sorting_method = nds_method, update_method = ndl_update,
-                                          initial_sort = False)
-
+        if passed_population is None:
+            population = []
+            for solution_idx in range(pop_size):
+                solution_gen_idx = 0
+                while True:
+                    if type(solution_params) == type(None): solution_params = {}
+                    temp_solution = pop_constructor.create(**solution_params)
+                    temp_solution.set_domain(solution_idx)
+                    if not np.any([temp_solution == solution for solution in population]):
+                        population.append(temp_solution)
+                        print(f'New solution accepted, confirmed {len(population)}/{pop_size} solutions.')
+                        break
+                    if solution_gen_idx == soluton_creation_attempts_softmax and global_var.verbose.show_warnings:
+                        print('solutions tried:', solution_gen_idx)
+                        warnings.warn('Too many failed attempts to create unique solutions for multiobjective optimization.\
+                                      Change solution parameters to allow more diversity.')
+                    if solution_gen_idx == soluton_creation_attempts_hardmax:
+                        population.append(temp_solution)
+                        print(f'New solution accepted, despite being a duplicate of another solution.\
+                              Confirmed {len(population)}/{pop_size} solutions.')
+                        break
+                    solution_gen_idx += 1
+            self.pareto_levels = ParetoLevels(population, sorting_method = nds_method, update_method = ndl_update,
+                                              initial_sort = False)
+        else:
+            if not isinstance(passed_population, ParetoLevels):
+                raise TypeError(f'Incorrect type of the population passed. Expected ParetoLevels object, instead got \
+                                  {type(passed_population)}')
+            self.pareto_levels = passed_population
+            population = passed_population.population # otherwise `population` is undefined in this branch, and the weights setup below would raise a NameError
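Editorial aside: a sketch of the resume-from-checkpoint flow this branch enables, assuming a ParetoLevels object was saved and restored earlier; population_instruct and saved_levels are hypothetical stand-ins, only the keyword names follow the signature above.

# hypothetical warm start of MOEADD from a previously saved ParetoLevels object
from epde.optimizers.moeadd.moeadd import MOEADDOptimizer

optimizer = MOEADDOptimizer(population_instruct=population_instruct,  # same dict as in a fresh run
                            weights_num=5, pop_size=5, solution_params=None,
                            delta=1/4., neighbors_number=3,
                            passed_population=saved_levels)           # restored ParetoLevels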
+        self.weights = []; weights_size = len(population[0].obj_funs) #np.empty((pop_size, len(optimized_functionals)))
         for weights_idx in range(weights_num):
             while True:
diff --git a/epde/optimizers/moeadd/solution_template.py b/epde/optimizers/moeadd/solution_template.py
index 2ab9e53..11dfaa2 100644
--- a/epde/optimizers/moeadd/solution_template.py
+++ b/epde/optimizers/moeadd/solution_template.py
@@ -108,7 +108,6 @@ def get_domain(self, weights):
             domains (`int`): Index of the domain, to that the solution belongs.
         """
         if self.precomputed_domain:
-            # print(self, 'DOMAIN IS:', self._domain)
             return self._domain
         else:
             self._domain = get_domain_idx(self, weights)
diff --git a/epde/optimizers/moeadd/strategy.py b/epde/optimizers/moeadd/strategy.py
index 942eed0..966e68f 100644
--- a/epde/optimizers/moeadd/strategy.py
+++ b/epde/optimizers/moeadd/strategy.py
@@ -9,12 +9,12 @@
 import numpy as np
 from functools import partial
 
-from epde.operators.utils.operator_mappers import map_operator_between_levels
+from epde.operators.utils.operator_mappers import map_operator_between_levels, OperatorCondition
 from epde.operators.utils.template import add_base_param_to_operator
 
 from epde.operators.multiobjective.selections import MOEADDSelection
 from epde.operators.multiobjective.variation import get_basic_variation
-from epde.operators.common.fitness import L2Fitness
+from epde.operators.common.fitness import L2Fitness, SolverBasedFitness, PIC
 from epde.operators.common.right_part_selection import RandomRHPSelector
 from epde.operators.multiobjective.moeadd_specific import get_pareto_levels_updater, SimpleNeighborSelector, get_initial_sorter
 from epde.operators.common.sparsity import LASSOSparsity
@@ -27,9 +27,9 @@ class MOEADDDirector(OptimizationPatternDirector):
     """
     Class for creating strategy builder of multicriterian optimization
     """
-# class MOEADDDirector(OptimizationPatternDirector):
-    def use_baseline(self, variation_params : dict = {}, mutation_params : dict = {}, sorter_params : dict = {},
-                     pareto_combiner_params : dict = {}, pareto_updater_params : dict = {}, **kwargs):
+    def use_baseline(self, use_solver: bool = False, variation_params : dict = {}, mutation_params : dict = {},
+                     sorter_params : dict = {}, pareto_combiner_params : dict = {},
+                     pareto_updater_params : dict = {}, **kwargs):
         add_kwarg_to_operator = partial(add_base_param_to_operator, target_dict = kwargs)
 
         neighborhood_selector = SimpleNeighborSelector(['number_of_neighbors'])
@@ -43,23 +43,35 @@ def use_baseline(self, variation_params : dict = {}, mutation_params : dict = {}
         right_part_selector = RandomRHPSelector()
 
-        eq_fitness = L2Fitness(['penalty_coeff'])
-        add_kwarg_to_operator(operator = eq_fitness)
-
         sparsity = LASSOSparsity()
-        coeff_calc = LinRegBasedCoeffsEquation()
+        coeff_calc = LinRegBasedCoeffsEquation()
+
+        if use_solver:
+            # fitness = SolverBasedFitness(['penalty_coeff'])
+            fitness = PIC(['penalty_coeff'])
+
+            sparsity = map_operator_between_levels(sparsity, 'gene level', 'chromosome level')
+            coeff_calc = map_operator_between_levels(coeff_calc, 'gene level', 'chromosome level')
+        else:
+            fitness = L2Fitness(['penalty_coeff'])
+        add_kwarg_to_operator(operator = fitness)
 
-        eq_fitness.set_suboperators({'sparsity' : sparsity, 'coeff_calc' : coeff_calc})
+        fitness.set_suboperators({'sparsity' : sparsity, 'coeff_calc' : coeff_calc})
         fitness_cond = lambda x: not getattr(x, 'fitness_calculated')
-        sys_fitness = map_operator_between_levels(eq_fitness, 'gene level', 'chromosome level', fitness_cond)
+        if use_solver:
+            fitness = OperatorCondition(fitness, fitness_cond)
+        else:
+            fitness = map_operator_between_levels(fitness, 'gene level', 'chromosome level',
+                                                  objective_condition=fitness_cond)
 
         rps_cond = lambda x: any([not elem_eq.right_part_selected for elem_eq in x.vals])
-        sys_rps = map_operator_between_levels(right_part_selector, 'gene level', 'chromosome level', rps_cond)
+        sys_rps = map_operator_between_levels(right_part_selector, 'gene level', 'chromosome level',
+                                              objective_condition=rps_cond)
 
         # Separate mutation from population updater for better customization.
-        initial_sorter = get_initial_sorter(right_part_selector = sys_rps, chromosome_fitness = sys_fitness,
+        initial_sorter = get_initial_sorter(right_part_selector = sys_rps, chromosome_fitness = fitness,
                                             sorter_params = sorter_params)
-        population_updater = get_pareto_levels_updater(right_part_selector = sys_rps, chromosome_fitness = sys_fitness,
+        population_updater = get_pareto_levels_updater(right_part_selector = sys_rps, chromosome_fitness = fitness,
                                                        constrained = False, mutation_params = mutation_params,
                                                        pl_updater_params = pareto_updater_params,
                                                        combiner_params = pareto_combiner_params)
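Editorial aside: where the non-solver path lifts the gene-level fitness with map_operator_between_levels, the solver path wraps it in the new OperatorCondition so already-scored chromosomes are skipped. A minimal sketch of that guard; pic_fitness and candidate are hypothetical stand-ins for a configured PIC operator and a SoEq chromosome.

from epde.operators.utils.operator_mappers import OperatorCondition

fitness_cond = lambda x: not getattr(x, 'fitness_calculated')
guarded_fitness = OperatorCondition(pic_fitness, condition=fitness_cond)
# re-applying is then a no-op for chromosomes whose fitness is already current
guarded_fitness.apply(candidate, arguments={})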
diff --git a/epde/optimizers/moeadd/supplementary.py b/epde/optimizers/moeadd/supplementary.py
index c7cc214..8e4fe3f 100644
--- a/epde/optimizers/moeadd/supplementary.py
+++ b/epde/optimizers/moeadd/supplementary.py
@@ -28,7 +28,7 @@
 from copy import deepcopy
 
 import numpy as np
-from abc import ABC, abstractproperty, abstractmethod
+from abc import ABC, abstractmethod
 
 from epde.supplementary import rts
@@ -41,11 +41,11 @@ def check_dominance(target, compared_with) -> bool:
         target (`src.moeadd.moeadd_solution_template.MOEADDSolution`): case-specific subclass object
             The individual solution on the pareto levels, compared with the other element.
         compared_with (`src.moeadd.moeadd_solution_template.MOEADDSolution`): case-specific subclass object
-            The individual solution on the pareto levels, with with the target is compared.
+            The individual solution on the pareto levels, with which the target is compared.
 
     Returns:
-        domiated (`bool`): Function returns True, if the **compared_with** dominates (has at least one objective
-            functions with less values, while the others are the same) the **target**;
+        dominated (`bool`): Function returns True, if the **target** dominates (has at least one objective
+            function with a lower value, while the others are not worse) the **compared_with**;
            False in all other cases.
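            Example (added for illustration, assuming all objective functions are minimized):
            for target.obj_fun = (0.5, 1.0) and compared_with.obj_fun = (0.5, 2.0) the target
            dominates and True is returned; for (0.5, 1.0) against (0.4, 2.0) neither solution
            dominates the other, so the result is False.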
""" @@ -84,7 +84,7 @@ def ndl_update(new_solution, levels) -> list: # efficient_ndl_update """ moving_set = {new_solution} new_levels = deepcopy(levels) # levels# CAUSES ERRORS DUE TO DEEPCOPY - # print(f'type(levels) is {type(levels)}') + for level_idx in np.arange(len(levels)): moving_set_new = set() for ms_idx, moving_set_elem in enumerate(moving_set): @@ -215,7 +215,10 @@ def slow_non_dominated_sorting(population) -> list: def acute_angle(vector_a, vector_b) -> float: - return np.arccos(np.dot(vector_a, vector_b)/(np.sqrt(np.dot(vector_a, vector_a))*np.sqrt(np.dot(vector_b, vector_b)))) + cos_val = np.dot(vector_a, vector_b)/(np.sqrt(np.dot(vector_a, vector_a))*np.sqrt(np.dot(vector_b, vector_b))) + if np.abs(cos_val) > 1.: + cos_val = np.sign(cos_val) + return np.arccos(cos_val) class Constraint(ABC): diff --git a/epde/optimizers/single_criterion/optimizer.py b/epde/optimizers/single_criterion/optimizer.py index 1618998..8d3cb75 100644 --- a/epde/optimizers/single_criterion/optimizer.py +++ b/epde/optimizers/single_criterion/optimizer.py @@ -132,31 +132,39 @@ class SimpleOptimizer(object): """ """ - def __init__(self, population_instruct, pop_size, solution_params, sorting_method = simple_sorting): + def __init__(self, population_instruct, pop_size, solution_params, sorting_method = simple_sorting, + passed_population: Population = None): soluton_creation_attempts_softmax = 10 soluton_creation_attempts_hardmax = 100 pop_constructor = SystemsPopulationConstructor(**population_instruct) assert type(solution_params) == type(None) or type(solution_params) == dict, 'The solution parameters, passed into population constructor must be in dictionary' - initial_population = [] - for solution_idx in range(pop_size): - solution_gen_idx = 0 - while True: - if type(solution_params) == type(None): solution_params = {} - temp_solution = pop_constructor.create(**solution_params) - if not np.any([temp_solution == solution for solution in initial_population]): - initial_population.append(temp_solution) - print(f'New solution accepted, confirmed {len(initial_population)}/{pop_size} solutions.') - break - if solution_gen_idx == soluton_creation_attempts_softmax and global_var.verbose.show_warnings: - print('solutions tried:', solution_gen_idx) - warnings.warn('Too many failed attempts to create unique solutions for multiobjective optimization. Change solution parameters to allow more diversity.') - if solution_gen_idx == soluton_creation_attempts_hardmax: - raise RuntimeError('Can not place an individual into the population even with many attempts.') - solution_gen_idx += 1 - - self.population = Population(elements = initial_population, sorting_method = sorting_method) + if passed_population is None: + initial_population = [] + for solution_idx in range(pop_size): + solution_gen_idx = 0 + while True: + if type(solution_params) == type(None): solution_params = {} + temp_solution = pop_constructor.create(**solution_params) + if not np.any([temp_solution == solution for solution in initial_population]): + initial_population.append(temp_solution) + print(f'New solution accepted, confirmed {len(initial_population)}/{pop_size} solutions.') + break + if solution_gen_idx == soluton_creation_attempts_softmax and global_var.verbose.show_warnings: + print('solutions tried:', solution_gen_idx) + warnings.warn('Too many failed attempts to create unique solutions for multiobjective optimization. 
Change solution parameters to allow more diversity.') + if solution_gen_idx == soluton_creation_attempts_hardmax: + raise RuntimeError('Can not place an individual into the population even with many attempts.') + solution_gen_idx += 1 + + self.population = Population(elements = initial_population, sorting_method = sorting_method) + + else: + if not isinstance(passed_population, Population): + raise TypeError(f'Incorrect type of the population passed. Expected Population object, instead got \ + {type(passed_population)}') + self.population = passed_population def set_strategy(self, strategy_director): builder = strategy_director.builder diff --git a/epde/parametric/parametric_equation.py b/epde/parametric/parametric_equation.py index 7505fdc..b1232da 100644 --- a/epde/parametric/parametric_equation.py +++ b/epde/parametric/parametric_equation.py @@ -40,13 +40,11 @@ def opt_func(params, *variables): the variables: variables[0] - the object, containing parametric equation. ''' - # print('params in opt_func', params) err = np.linalg.norm(variables[0].evaluate_with_params(params)) print('error:', err) return err def opt_fun_grad(params, *variables): - # print('evaluating gradient') grad = np.zeros_like(params) for param_idx, param_in_term_props in variables[0].param_term_beloning.items(): grad[param_idx] = np.sum( diff --git a/epde/preprocessing/derivatives.py b/epde/preprocessing/derivatives.py index 44e7643..82f176f 100644 --- a/epde/preprocessing/derivatives.py +++ b/epde/preprocessing/derivatives.py @@ -12,7 +12,7 @@ from typing import Union import torch -device = torch.device('cpu') +# device = torch.device('cpu') import epde.globals as global_var from epde.preprocessing.cheb import process_point_cheb diff --git a/epde/preprocessing/preprocessor_setups.py b/epde/preprocessing/preprocessor_setups.py index fddeab0..b417100 100644 --- a/epde/preprocessing/preprocessor_setups.py +++ b/epde/preprocessing/preprocessor_setups.py @@ -38,6 +38,17 @@ def build_ANN_preprocessing(self, test_output=False, epochs_max=1e5, self.builder.set_deriv_calculator(AdaptiveFiniteDeriv, *deriv_calculator_args, **deriv_calculator_kwargs) + def build_FD_preprocessing(self): + smoother_args = () + smoother_kwargs = {} + + deriv_calculator_args = () + deriv_calculator_kwargs = {'grid': None} + + self.builder.set_smoother(PlaceholderSmoother, *smoother_args, **smoother_kwargs) + self.builder.set_deriv_calculator(AdaptiveFiniteDeriv, *deriv_calculator_args, + **deriv_calculator_kwargs) + def build_spectral_preprocessing(self, n=None, steepness=1): smoother_args = () smoother_kwargs = {} diff --git a/epde/preprocessing/smoothers.py b/epde/preprocessing/smoothers.py index 867a7b5..6263a88 100644 --- a/epde/preprocessing/smoothers.py +++ b/epde/preprocessing/smoothers.py @@ -12,7 +12,7 @@ import numpy as np import torch -device = torch.device('cpu') + import epde.globals as global_var @@ -35,13 +35,13 @@ def __call__(self, data, *args, **kwargs): def baseline_ann(dim): model = torch.nn.Sequential( torch.nn.Linear(dim, 256), - torch.nn.Tanh(), + torch.nn.ReLU(), torch.nn.Linear(256, 64), - torch.nn.Tanh(), - torch.nn.Linear(64, 64), - torch.nn.Tanh(), + torch.nn.ReLU(), + # torch.nn.Linear(64, 64), + # torch.nn.Tanh(), torch.nn.Linear(64, 1024), - torch.nn.Tanh(), + torch.nn.ReLU(), torch.nn.Linear(1024, 1) ) return model @@ -52,7 +52,7 @@ def __init__(self): pass def __call__(self, data, grid, epochs_max=1e3, loss_mean=1000, batch_frac=0.5, - learining_rate=1e-4, return_ann: bool = False): + learining_rate=1e-4, 
return_ann: bool = False, device = 'cpu'): dim = 1 if np.any([s == 1 for s in data.shape]) and data.ndim == 2 else data.ndim model = baseline_ann(dim) grid_flattened = torch.from_numpy(np.array([subgrid.reshape(-1) for subgrid in grid])).float().T @@ -60,6 +60,8 @@ def __call__(self, data, grid, epochs_max=1e3, loss_mean=1000, batch_frac=0.5, original_shape = data.shape field_ = torch.from_numpy(data.reshape(-1, 1)).float() + + # device = torch.device(device) grid_flattened.to(device) field_.to(device) optimizer = torch.optim.Adam(model.parameters(), lr=learining_rate) @@ -90,8 +92,8 @@ def __call__(self, data, grid, epochs_max=1e3, loss_mean=1000, batch_frac=0.5, if loss_mean < min_loss: best_model = model min_loss = loss_mean - if global_var.verbose.show_ann_loss: - print('Surface training t={}, loss={}'.format(t, loss_mean)) + # if global_var.verbose.show_ann_loss: + print('Surface training t={}, loss={}'.format(t, loss_mean)) t += 1 data_approx = best_model(grid_flattened).detach().numpy().reshape(original_shape) diff --git a/epde/solver/__init__.py b/epde/solver/__init__.py index e69de29..2e295ce 100644 --- a/epde/solver/__init__.py +++ b/epde/solver/__init__.py @@ -0,0 +1 @@ +from epde.solver import * diff --git a/epde/solver/callbacks/__init__.py b/epde/solver/callbacks/__init__.py new file mode 100644 index 0000000..2c37c09 --- /dev/null +++ b/epde/solver/callbacks/__init__.py @@ -0,0 +1,2 @@ +from epde.solver.callbacks.adaptive_lambda import AdaptiveLambda +from epde.solver.callbacks.early_stopping import EarlyStopping \ No newline at end of file diff --git a/epde/solver/callbacks/adaptive_lambda.py b/epde/solver/callbacks/adaptive_lambda.py new file mode 100644 index 0000000..237c334 --- /dev/null +++ b/epde/solver/callbacks/adaptive_lambda.py @@ -0,0 +1,125 @@ +import numpy as np +import torch +from typing import Tuple, List +from SALib import ProblemSpec + +from epde.solver.callbacks.callback import Callback +from epde.solver.utils import bcs_reshape, samples_count, lambda_print + +class AdaptiveLambda(Callback): + """ + Serves for computing adaptive lambdas. + """ + def __init__(self, + sampling_N: int = 1, + second_order_interactions = True): + """ + + Args: + sampling_N (int, optional): essentially determines how often the lambda will be re-evaluated. Defaults to 1. + second_order_interactions (bool, optional): Calculate second-order sensitivities. Defaults to True. + """ + super().__init__() + self.second_order_interactions = second_order_interactions + self.sampling_N = sampling_N + + @staticmethod + def lambda_compute(pointer: int, length_list: list, ST: np.ndarray) -> torch.Tensor: + """ Computes lambdas. + + Args: + pointer (int): the label to calculate the lambda for the corresponding parameter. + length_list (list): dict where values are lengths. + ST (np.ndarray): result of SALib.ProblemSpec(). + + Returns: + torch.Tensor: calculated lambdas written as vector + """ + + lambdas = [] + for value in length_list: + lambdas.append(sum(ST) / sum(ST[pointer:pointer + value])) + pointer += value + return torch.tensor(lambdas).float().reshape(1, -1) + + def update(self, + op_length: List, + bval_length: List, + sampling_D: int) -> Tuple[torch.Tensor, torch.Tensor]: + """ Updates all lambdas (operator and boundary). + + Args: + op_length (list): list with lengths of operator solution. + bval_length (list): list with lengths of boundary solution. + sampling_D (int): sum of op_length and bval_length. + + Returns: + lambda_op (torch.Tensor): values of lambdas for operator. 
+            lambda_bound (torch.Tensor): values of lambdas for boundary.
+        """
+
+        op_array = np.array(self.op_list)
+        bc_array = np.array(self.bval_list)
+        loss_array = np.array(self.loss_list)
+
+        X_array = np.hstack((op_array, bc_array))
+
+        bounds = [[-100, 100] for _ in range(sampling_D)]
+        names = ['x{}'.format(i) for i in range(sampling_D)]
+
+        sp = ProblemSpec({'names': names, 'bounds': bounds})
+
+        sp.set_samples(X_array)
+        sp.set_results(loss_array)
+        sp.analyze_sobol(calc_second_order=self.second_order_interactions)
+
+        #
+        # To assess variance we need total sensitivity indices for every variable
+        #
+        ST = sp.analysis['ST']
+
+        lambda_op = self.lambda_compute(0, op_length, ST)
+
+        lambda_bnd = self.lambda_compute(sum(op_length), bval_length, ST)
+
+        return lambda_op, lambda_bnd
+
+    def lambda_update(self):
+        """ Method for lambdas calculation.
+        """
+        sln_cls = self.model.solution_cls
+        bval = sln_cls.bval
+        true_bval = sln_cls.true_bval
+        bval_keys = sln_cls.bval_keys
+        bval_length = sln_cls.bval_length
+        op = sln_cls.op if sln_cls.batch_size is None else sln_cls.save_op # in batch mode, use the accumulated loss, else the one from a single evaluation
+        self.op_list = sln_cls.op_list
+        self.bval_list = sln_cls.bval_list
+        self.loss_list = sln_cls.loss_list
+
+        bcs = bcs_reshape(bval, true_bval, bval_length)
+        op_length = [op.shape[0]]*op.shape[-1]
+
+        self.op_list.append(torch.t(op).reshape(-1).cpu().detach().numpy())
+        self.bval_list.append(bcs.cpu().detach().numpy())
+        self.loss_list.append(float(sln_cls.loss_normalized.item()))
+
+        sampling_amount, sampling_D = samples_count(
+            second_order_interactions = self.second_order_interactions,
+            sampling_N = self.sampling_N,
+            op_length=op_length,
+            bval_length = bval_length)
+
+        if len(self.op_list) == sampling_amount:
+            sln_cls.lambda_operator, sln_cls.lambda_bound = \
+                self.update(op_length=op_length, bval_length=bval_length, sampling_D=sampling_D)
+            self.op_list.clear()
+            self.bval_list.clear()
+            self.loss_list.clear()
+
+            oper_keys = [f'eq_{i}' for i in range(len(op_length))]
+            lambda_print(sln_cls.lambda_operator, oper_keys)
+            lambda_print(sln_cls.lambda_bound, bval_keys)
+
+    def on_epoch_end(self, logs=None):
+        self.lambda_update()
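Editorial aside: a worked check of lambda_compute's rescaling, independent of the solver classes. With the total Sobol indices ST summing to 1, one operator group of length 2, and two boundary groups of length 1, each lambda is sum(ST) / sum(ST[group]); the ST values below are made up.

import numpy as np
import torch

ST = np.array([0.2, 0.2, 0.5, 0.1])   # total sensitivity indices (illustrative)
op_length, bval_length = [2], [1, 1]

def lambda_compute(pointer, length_list, ST):
    # mirrors AdaptiveLambda.lambda_compute above
    lambdas = []
    for value in length_list:
        lambdas.append(sum(ST) / sum(ST[pointer:pointer + value]))
        pointer += value
    return torch.tensor(lambdas).float().reshape(1, -1)

print(lambda_compute(0, op_length, ST))                 # tensor([[2.5]]) : 1.0 / 0.4
print(lambda_compute(sum(op_length), bval_length, ST))  # tensor([[ 2., 10.]])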
+ """ + def __init__(self, + model: Model + ): + """ + Args: + model (Model): object of Model class + """ + self.solution_cls = model.solution_cls + + @staticmethod + def _cache_files(files: list, nmodels: Union[int, None]=None) -> np.ndarray: + """ At some point we may want to reduce the number of models that are + checked for the best in the cache. + + Args: + files (list): list with all model names in cache. + nmodels (Union[int, None], optional): models quantity for checking. Defaults to None. + + Returns: + cache_n (np.ndarray): array with random cache files names. + """ + + if nmodels is None: + # here we take all files that are in cache + cache_n = np.arange(len(files)) + else: + # here we take random nmodels from the cache + cache_n = np.random.choice(len(files), nmodels, replace=False) + + return cache_n + + @staticmethod + def _model_reform(init_model: Union[torch.nn.Sequential, torch.nn.ModuleList], + model: Union[torch.nn.Sequential, torch.nn.ModuleList]): + """ + As some models are nn.Sequential class objects, + but another models are nn.Module class objects. + This method does checking the solver model (init_model) + and the cache model (model). + Args: + init_model (nn.Sequential or nn.ModuleList): solver model. + model (nn.Sequential or nn.ModuleList): cache model. + Returns: + init_model (nn.Sequential or nn.ModuleList): checked init_model. + model (nn.Sequential or nn.ModuleList): checked model. + """ + try: + model[0] + except: + model = model.model + + try: + init_model[0] + except: + init_model = init_model.model + + return init_model, model + + def cache_lookup(self, + cache_dir: str, + nmodels: Union[int, None] = None, + save_graph: bool = False, + cache_verbose: bool = False) -> Union[None, dict, torch.nn.Module]: + """Looking for the best model (min loss) model from the cache files. + + Args: + cache_dir (str): folder where system looks for cached models. + nmodels (Union[int, None], optional): maximal number of models that are taken from cache dir. Defaults to None. + save_graph (bool, optional): responsible for saving the computational graph. Defaults to False. + cache_verbose (bool, optional): verbose cache operations. Defaults to False. + + Returns: + Union[None, dict, torch.Tensor]: best model with optimizator state. 
+ """ + + files = glob.glob(cache_dir + '\*.tar') + + if cache_verbose: + print(f"The CACHE will be searched among the models in the folder {cache_dir}.") + + if len(files) == 0: + best_checkpoint = None + return best_checkpoint + + cache_n = self._cache_files(files, nmodels) + + min_loss = np.inf + best_checkpoint = {} + + device = device_type() + + initial_model = self.solution_cls.model + + for i in cache_n: + file = files[i] + + try: + checkpoint = torch.load(file) + except Exception: + if cache_verbose: + print('Error loading file {}'.format(file)) + continue + + model = checkpoint['model'] + model.load_state_dict(checkpoint['model_state_dict']) + + # this one for the input shape fix if needed + + try: + solver_model, cache_model = self._model_reform(self.solution_cls.model, model) + except Exception: + if cache_verbose: + print('Error reforming file {}'.format(file)) + continue + + if cache_model[0].in_features != solver_model[0].in_features: + continue + try: + if count_output(model) != count_output(self.solution_cls.model): + continue + except Exception: + continue + + model = model.to(device) + self.solution_cls._model_change(model) + loss, _ = self.solution_cls.evaluate(save_graph=save_graph) + + if loss < min_loss: + min_loss = loss + best_checkpoint['model'] = model + best_checkpoint['model_state_dict'] = model.state_dict() + if cache_verbose: + print('best_model_num={} , loss={}'.format(i, min_loss.item())) + + self.solution_cls._model_change(initial_model) + + if best_checkpoint == {}: + best_checkpoint = None + + return best_checkpoint + + def scheme_interp(self, + trained_model: torch.nn.Module, + cache_verbose: bool = False) -> torch.nn.Module: + """ If the cache model has another arcitechure to user's model, + we will not be able to use it. So we train user's model on the + outputs of cache model. + + Args: + trained_model (torch.nn.Module): the best model (min loss) from cache. + cache_verbose (bool, optional): verbose on/off of cache operations. Defaults to False. + + """ + + grid = self.solution_cls.grid + + model = self.solution_cls.model + + optimizer = torch.optim.Adam(model.parameters(), lr=0.001) + + loss = torch.mean(torch.square( + trained_model(grid) - model(grid))) + + def closure(): + optimizer.zero_grad() + loss = torch.mean((trained_model(grid) - model(grid)) ** 2) + loss.backward() + return loss + + t = 0 + while loss > 1e-5 and t < 1e5: + optimizer.step(closure) + loss = torch.mean(torch.square( + trained_model(grid) - model(grid))) + t += 1 + if cache_verbose: + print('Interpolate from trained model t={}, loss={}'.format( + t, loss)) + + self.solution_cls._model_change(model) + + def cache_retrain(self, + cache_checkpoint: dict, + cache_verbose: bool = False) -> torch.nn.Module: + """ The comparison of the user's model and cache model architecture. + If they are same, we will use model from cache. In the other case + we use interpolation (scheme_interp method) + + Args: + cache_checkpoint (dict): checkpoint of the cache model + cache_verbose (bool, optional): on/off printing cache operations. Defaults to False. 
+ + """ + + model = self.solution_cls.model + + # do nothing if cache is empty + if cache_checkpoint is None: + return None + # if models have the same structure use the cache model state, + # and the cache model has ordinary structure + if str(cache_checkpoint['model']) == str(model) and \ + isinstance(model, torch.nn.Sequential) and \ + isinstance(model[0], torch.nn.Linear): + model = cache_checkpoint['model'] + model.load_state_dict(cache_checkpoint['model_state_dict']) + model.train() + self.solution_cls._model_change(model) + if cache_verbose: + print('Using model from cache') + # else retrain the input model using the cache model + else: + cache_model = cache_checkpoint['model'] + cache_model.load_state_dict(cache_checkpoint['model_state_dict']) + cache_model.eval() + self.scheme_interp( + cache_model, cache_verbose=cache_verbose) + + +class Cache(Callback): + """ + Prepares user's model. Serves for computing acceleration.\n + Saves the trained model to the cache, and subsequently it is possible to use pre-trained model + (if it saved and if the new model is structurally similar) to sped up computing.\n + If there isn't pre-trained model in cache, the training process will start from the beginning. + """ + + def __init__(self, + nmodels: Union[int, None] = None, + cache_dir: str = 'tedeous_cache', + cache_verbose: bool = False, + cache_model: Union[torch.nn.Sequential, None] = None, + model_randomize_parameter: Union[int, float] = 0, + clear_cache: bool = False + ): + """ + Args: + nmodels (Union[int, None], optional): maximal number of models that are taken from cache dir. Defaults to None. Defaults to None. + cache_dir (str, optional): directory with cached models. Defaults to '../tedeous_cache/' in temporary directoy of user system. + If cache_dir is custom, then file will be searched in *torch_de_solver* directory. + cache_verbose (bool, optional): printing cache operations. Defaults to False. + cache_model (Union[torch.nn.Sequential, None], optional): model for mat method, which will be saved in cache. Defaults to None. + model_randomize_parameter (Union[int, float], optional): creates a random model parameters (weights, biases) + multiplied with a given randomize parameter.. Defaults to 0. + clear_cache (bool, optional): clear cache directory. Defaults to False. + """ + + self.nmodels = nmodels + self.cache_verbose = cache_verbose + self.cache_model = cache_model + self.model_randomize_parameter = model_randomize_parameter + if cache_dir == 'tedeous_cache': + temp_dir = tempfile.gettempdir() + folder_path = os.path.join(temp_dir, 'tedeous_cache/') + if os.path.exists(folder_path) and os.path.isdir(folder_path): + pass + else: + os.makedirs(folder_path) + self.cache_dir = folder_path + else: + try: + file = __file__ + except: + file = os.getcwd() + self.cache_dir = os.path.normpath((os.path.join(os.path.dirname(file), '..', '..', cache_dir))) + if clear_cache: + remove_all_files(self.cache_dir) + + def _cache_nn(self): + """ take model from cache as initial guess for *NN, autograd* modes. + """ + + cache_preproc = CachePreprocessing(self.model) + + r = create_random_fn(self.model_randomize_parameter) + + cache_checkpoint = cache_preproc.cache_lookup(cache_dir=self.cache_dir, + nmodels=self.nmodels, + cache_verbose=self.cache_verbose) + + cache_preproc.cache_retrain(cache_checkpoint, + cache_verbose=self.cache_verbose) + self.model.solution_cls.model.apply(r) + + def _cache_mat(self) -> torch.Tensor: + """ take model from cache as initial guess for *mat* mode. 
+ """ + + net = self.model.net + domain = self.model.domain + equation = mat_op_coeff(deepcopy(self.model.equation)) + conditions = self.model.conditions + lambda_operator = self.model.lambda_operator + lambda_bound = self.model.lambda_bound + weak_form = self.model.weak_form + + net_autograd = model_mat(net, domain) + + autograd_model = Model(net_autograd, domain, equation, conditions) + + autograd_model.compile('autograd', lambda_operator, lambda_bound, weak_form=weak_form) + + r = create_random_fn(self.model_randomize_parameter) + + cache_preproc = CachePreprocessing(autograd_model) + + cache_checkpoint = cache_preproc.cache_lookup( + cache_dir=self.cache_dir, + nmodels=self.nmodels, + cache_verbose=self.cache_verbose) + + if cache_checkpoint is not None: + cache_preproc.cache_retrain( + cache_checkpoint, + cache_verbose=self.cache_verbose) + + autograd_model.solution_cls.model.apply(r) + + model = autograd_model.solution_cls.model( + autograd_model.solution_cls.grid).reshape( + self.model.solution_cls.model.shape).detach() + + self.model.solution_cls._model_change(model.requires_grad_()) + + def cache(self): + """ Wrap for cache_mat and cache_nn methods. + """ + + if self.model.mode != 'mat': + return self._cache_nn() + elif self.model.mode == 'mat': + return self._cache_mat() + + def on_train_begin(self, logs=None): + self.cache() + self.model._save_dir = self.cache_dir diff --git a/epde/solver/callbacks/callback.py b/epde/solver/callbacks/callback.py new file mode 100644 index 0000000..b35b4c3 --- /dev/null +++ b/epde/solver/callbacks/callback.py @@ -0,0 +1,76 @@ +from abc import ABC, abstractmethod + +class Callback(ABC): + """Base class used to build new callbacks. + """ + + def __init__(self): + self.print_every = None + self.verbose = 0 + self.validation_data = None + self._model = None + + def set_params(self, params): + self.params = params + + def set_model(self, model): + self._model = model + + @property + def model(self): + return self._model + + def on_epoch_begin(self, logs=None): + """Called at the start of an epoch. + + Subclasses should override for any actions to run. This function should + only be called during TRAIN mode. + + Args: + epoch: Integer, index of epoch. + logs: Dict. Currently no data is passed to this argument for this + method but that may change in the future. + """ + pass + + def on_epoch_end(self, logs=None): + """Called at the end of an epoch. + + Subclasses should override for any actions to run. This function should + only be called during TRAIN mode. + + Args: + epoch: Integer, index of epoch. + logs: Dict, metric results for this training epoch, and for the + validation epoch if validation is performed. Validation result + keys are prefixed with `val_`. For training epoch, the values of + the `Model`'s metrics are returned. Example: + `{'loss': 0.2, 'accuracy': 0.7}`. + """ + pass + + def on_train_begin(self, logs=None): + """Called at the beginning of training. + + Subclasses should override for any actions to run. + + Args: + logs: Dict. Currently no data is passed to this argument for this + method but that may change in the future. + """ + pass + + def on_train_end(self, logs=None): + """Called at the end of training. + + Subclasses should override for any actions to run. + + Args: + logs: Dict. Currently the output of the last call to + `on_epoch_end()` is passed to this argument for this method but + that may change in the future. 
+ """ + pass + + def during_epoch(self, logs=None): + pass \ No newline at end of file diff --git a/epde/solver/callbacks/callback_list.py b/epde/solver/callbacks/callback_list.py new file mode 100644 index 0000000..42ab6b6 --- /dev/null +++ b/epde/solver/callbacks/callback_list.py @@ -0,0 +1,63 @@ +from epde.solver.callbacks.callback import Callback + +# import tree + +class CallbackList(Callback): + """Container abstracting a list of callbacks.""" + def __init__( + self, + callbacks=None, + model=None, + **params, + ): + """Container for `Callback` instances. + + This object wraps a list of `Callback` instances, making it possible + to call them all at once via a single endpoint + (e.g. `callback_list.on_epoch_end(...)`). + + Args: + callbacks: List of `Callback` instances. + model: The `Model` these callbacks are used with. + **params: If provided, parameters will be passed to each `Callback` + via `Callback.set_params`. + """ + self.callbacks = callbacks if callbacks else [] + + if model: + self.set_model(model) + if params: + self.set_params(params) + + def set_model(self, model): + super().set_model(model) + for callback in self.callbacks: + callback.set_model(model) + + def append(self, callback): + self.callbacks.append(callback) + + def set_params(self, params): + self.params = params + for callback in self.callbacks: + callback.set_params(params) + + def on_epoch_begin(self, logs=None): + logs = logs or {} + for callback in self.callbacks: + callback.on_epoch_begin(logs) + + def on_epoch_end(self, logs=None): + logs = logs or {} + for callback in self.callbacks: + callback.on_epoch_end(logs) + + def on_train_begin(self, logs=None): + logs = logs or {} + for callback in self.callbacks: + callback.on_train_begin(logs) + + def on_train_end(self, logs=None): + logs = logs or {} + for callback in self.callbacks: + callback.on_train_end(logs) diff --git a/epde/solver/callbacks/early_stopping.py b/epde/solver/callbacks/early_stopping.py new file mode 100644 index 0000000..86b7d70 --- /dev/null +++ b/epde/solver/callbacks/early_stopping.py @@ -0,0 +1,150 @@ +import numpy as np +from typing import Union +import torch +import datetime +from epde.solver.callbacks.callback import Callback +from epde.solver.utils import create_random_fn + + +class EarlyStopping(Callback): + """ Class for using adaptive stop criterias at training process. + """ + def __init__(self, + eps: float = 1e-5, + loss_window: int = 100, + no_improvement_patience: int = 1000, + patience: int = 5, + abs_loss: Union[float, None] = None, + normalized_loss: bool = False, + randomize_parameter: float = 1e-5, + info_string_every: Union[int, None] = None, + verbose: bool = True, + save_best: bool = False + ): + """_summary_ + + Args: + eps (float, optional): arbitrarily small number that uses for loss comparison criterion. Defaults to 1e-5. + loss_window (int, optional): width of losses window which is used for average loss estimation. Defaults to 100. + no_improvement_patience (int, optional): number of iterations during which + the loss may not improve.. Defaults to 1000. + patience (int, optional): maximum number of times the stopping criterion + can be satisfied.. Defaults to 5. + abs_loss (Union[float, None], optional): absolute loss value using in _absloss_check().. Defaults to None. + normalized_loss (bool, optional): calculate loss with all lambdas=1. Defaults to False. + randomize_parameter (float, optional): some error for resulting + model weights to to avoid local optima. Defaults to 1e-5. 
+            info_string_every (Union[int, None], optional): prints the loss state after every *int*
+                step. Defaults to None.
+            verbose (bool, optional): print or not the info about the loss and the current state of the stopping criteria. Defaults to True.
+            save_best (bool, optional): the model with the least loss is saved during training and returned at the end as the result. Defaults to False.
+        """
+        super().__init__()
+        self.eps = eps
+        self.loss_window = loss_window
+        self.no_improvement_patience = no_improvement_patience
+        self.patience = patience
+        self.abs_loss = abs_loss
+        self.normalized_loss = normalized_loss
+        self._stop_dings = 0
+        self._t_imp_start = 0
+        self._r = create_random_fn(randomize_parameter)
+        self.info_string_every = info_string_every if info_string_every is not None else np.inf
+        self.verbose = verbose
+        self.save_best = save_best
+        self.best_model = None
+
+    def _line_create(self):
+        """ Approximating the last_loss list (len(last_loss) = loss_window) by a line.
+        """
+        self._line = np.polyfit(range(self.loss_window), self.last_loss, 1)
+
+    def _window_check(self):
+        """ Stopping criterion. We divide the slope of the approximating
+        line (_line_create()) by the current loss value and compare it with *eps*.
+        """
+        if self.t % self.loss_window == 0 and self._check is None:
+            self._line_create()
+            if abs(self._line[0] / self.model.cur_loss) < self.eps and self.t > 0:
+                self._stop_dings += 1
+                if self.mode in ('NN', 'autograd'):
+                    self.model.net.apply(self._r)
+                self._check = 'window_check'
+
+    def _patience_check(self):
+        """ Stopping criterion. We track the minimum loss and count the steps
+        on which the current loss is greater than min_loss. If these steps reach the
+        no_improvement_patience parameter, the stopping criterion is satisfied.
+        """
+        if (self.t - self._t_imp_start) == self.no_improvement_patience and self._check is None:
+            self._stop_dings += 1
+            self._t_imp_start = self.t
+            if self.mode in ('NN', 'autograd'):
+                if self.save_best:
+                    self.model.net = self.best_model
+                self.model.net.apply(self._r)
+            self._check = 'patience_check'
+
+    def _absloss_check(self):
+        """ Stopping criterion. If the absolute value of the current loss is lower than
+        the *abs_loss* param, the stopping criterion is satisfied.
+        """
+        if self.abs_loss is not None and self.model.cur_loss < self.abs_loss and self._check is None:
+            self._stop_dings += 1
+            self._check = 'absloss_check'
+
+    def verbose_print(self):
+        """ Print info about the loss and the stopping criteria.
+        """
+
+        if self._check == 'window_check':
+            print('[{}] Oscillation near the same loss'.format(
+                datetime.datetime.now()))
+        elif self._check == 'patience_check':
+            print('[{}] No improvement in {} steps'.format(
+                datetime.datetime.now(), self.no_improvement_patience))
+        elif self._check == 'absloss_check':
+            print('[{}] Absolute value of loss is lower than threshold'.format(
+                datetime.datetime.now()))
+
+        if self._check is not None or self.t % self.info_string_every == 0:
+            try:
+                self._line
+            except:
+                self._line_create()
+            loss = self.model.cur_loss.item() if isinstance(self.model.cur_loss, torch.Tensor) else self.model.cur_loss
+            info = '[{}] Step = {} loss = {:.6f} normalized loss line = {:.6f}x+{:.6f}. There were {} stop dings already.'.format(
+                datetime.datetime.now(), self.t, loss, self._line[0] / loss, self._line[1] / loss, self._stop_dings)
+            print(info)
+
+    def on_epoch_end(self, logs=None):
+        self._window_check()
+        self._patience_check()
+        self._absloss_check()
+
+        if self.model.cur_loss < self.model.min_loss:
+            self.model.min_loss = self.model.cur_loss
+            if self.save_best:
+                self.best_model = self.model.net
+            self._t_imp_start = self.t
+
+        if self.verbose:
+            self.verbose_print()
+        if self._stop_dings >= self.patience:
+            self.model.stop_training = True
+            if self.save_best:
+                self.model.net = self.best_model
+        self._check = None
+
+    def on_epoch_begin(self, logs=None):
+        self.t = self.model.t
+        self.mode = self.model.mode
+        self._check = self.model._check
+        try:
+            self.last_loss[(self.t - 3) % self.loss_window] = self.model.cur_loss
+        except:
+            self.last_loss = np.zeros(self.loss_window) + float(self.model.min_loss)
diff --git a/epde/solver/callbacks/inverse_task.py b/epde/solver/callbacks/inverse_task.py
new file mode 100644
index 0000000..d60dcd8
--- /dev/null
+++ b/epde/solver/callbacks/inverse_task.py
@@ -0,0 +1,37 @@
+import numpy as np
+from typing import Union
+import torch
+import datetime
+from epde.solver.callbacks.callback import Callback
+
+
+class InverseTask(Callback):
+    """Class for printing the parameters during inverse task solution.
+    """
+    def __init__(self,
+                 parameters: dict,
+                 info_string_every: Union[int, None] = None):
+        """
+        Args:
+            parameters (dict): dictionary with the initial guess of parameters.
+            info_string_every (Union[int, None], optional): print parameters after every *int* step. Defaults to None.
+        """
+        super().__init__()
+        self.parameters = parameters
+        self.info_string_every = info_string_every
+
+    def str_param(self):
+        """ Printing the inverse parameters.
+        """
+        if self.info_string_every is not None and self.model.t % self.info_string_every == 0:
+            param = list(self.parameters.keys())
+            for name, p in self.model.net.named_parameters():
+                if name in param:
+                    try:
+                        param_str += name + '=' + str(p.item()) + ' '
+                    except:
+                        param_str = name + '=' + str(p.item()) + ' '
+            print(param_str)
+
+    def on_epoch_end(self, logs=None):
+        self.str_param()
\ No newline at end of file
diff --git a/epde/solver/callbacks/plot.py b/epde/solver/callbacks/plot.py
new file mode 100644
index 0000000..6b0770a
--- /dev/null
+++ b/epde/solver/callbacks/plot.py
@@ -0,0 +1,157 @@
+import os
+import datetime
+from typing import Union
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib import cm
+import torch
+from epde.solver.callbacks.callback import Callback
+from mpl_toolkits.mplot3d import Axes3D
+
+
+class Plots(Callback):
+    """Class for plotting solutions."""
+
+    def __init__(self,
+                 print_every: Union[int, None] = 500,
+                 save_every: Union[int, None] = 500,
+                 title: str = None,
+                 img_dir: str = None):
+        """
+        Args:
+            print_every (Union[int, None], optional): print plots after every *print_every* steps. Defaults to 500.
+            save_every (Union[int, None], optional): save plots after every *save_every* steps. Defaults to 500.
+            title (str, optional): plots title. Defaults to None.
+            img_dir (str, optional): directory where plots are saved. Defaults to None.
+        """
+        super().__init__()
+        self.print_every = print_every if print_every is not None else 0.1
+        self.save_every = save_every if save_every is not None else 0.1
+        self.title = title
+        self.img_dir = img_dir
+
+    def _print_nn(self):
+        """
+        Solution plot for *NN, autograd* mode.
+ + """ + + attributes = {'model': ['out_features', 'output_dim', 'width_out'], + 'layers': ['out_features', 'output_dim', 'width_out']} + + nvars_model = None + + for key, values in attributes.items(): + for value in values: + try: + nvars_model = getattr(getattr(self.net, key)[-1], value) + break + except AttributeError: + pass + + if nvars_model is None: + try: + nvars_model = self.net[-1].out_features + except: + nvars_model = self.net.width_out[-1] + + nparams = self.grid.shape[1] + fig = plt.figure(figsize=(15, 8)) + for i in range(nvars_model): + if nparams == 1: + ax1 = fig.add_subplot(1, nvars_model, i + 1) + if self.title is not None: + ax1.set_title(self.title + ' variable {}'.format(i)) + ax1.scatter(self.grid.detach().cpu().numpy().reshape(-1), + self.net(self.grid)[:, i].detach().cpu().numpy()) + + else: + ax1 = fig.add_subplot(1, nvars_model, i + 1, projection='3d') + if self.title is not None: + ax1.set_title(self.title + ' variable {}'.format(i)) + + ax1.plot_trisurf(self.grid[:, 0].detach().cpu().numpy(), + self.grid[:, 1].detach().cpu().numpy(), + self.net(self.grid)[:, i].detach().cpu().numpy(), + cmap=cm.jet, linewidth=0.2, alpha=1) + ax1.set_xlabel("x1") + ax1.set_ylabel("x2") + + def _print_mat(self): + """ + Solution plot for mat mode. + """ + + nparams = self.grid.shape[0] + nvars_model = self.net.shape[0] + fig = plt.figure(figsize=(15, 8)) + for i in range(nvars_model): + if nparams == 1: + ax1 = fig.add_subplot(1, nvars_model, i + 1) + if self.title is not None: + ax1.set_title(self.title + ' variable {}'.format(i)) + ax1.scatter(self.grid.detach().cpu().numpy().reshape(-1), + self.net[i].detach().cpu().numpy().reshape(-1)) + else: + ax1 = fig.add_subplot(1, nvars_model, i + 1, projection='3d') + + if self.title is not None: + ax1.set_title(self.title + ' variable {}'.format(i)) + ax1.plot_trisurf(self.grid[0].detach().cpu().numpy().reshape(-1), + self.grid[1].detach().cpu().numpy().reshape(-1), + self.net[i].detach().cpu().numpy().reshape(-1), + cmap=cm.jet, linewidth=0.2, alpha=1) + ax1.set_xlabel("x1") + ax1.set_ylabel("x2") + + def _dir_path(self, save_dir: str) -> str: + """ Path for save figures. + + Args: + save_dir (str): directory where saves in + + Returns: + str: directory where saves in + """ + + if save_dir is None: + try: + img_dir = os.path.join(os.path.dirname(__file__), '..', '..', 'img') + except: + current_dir = globals()['_dh'][0] + img_dir = os.path.join(os.path.dirname(current_dir), 'img') + + if not os.path.isdir(img_dir): + os.mkdir(img_dir) + directory = os.path.abspath(os.path.join(img_dir, + str(datetime.datetime.now().timestamp()) + '.png')) + else: + if not os.path.isdir(save_dir): + os.mkdir(save_dir) + directory = os.path.join(save_dir, + str(datetime.datetime.now().timestamp()) + '.png') + return directory + + def solution_print( + self): + """ printing or saving figures. 
+ """ + print_flag = self.model.t % self.print_every == 0 + save_flag = self.model.t % self.save_every == 0 + + if print_flag or save_flag: + self.net = self.model.net + self.grid = self.model.solution_cls.grid + if self.model.mode == 'mat': + self._print_mat() + else: + self._print_nn() + if save_flag: + directory = self._dir_path(self.img_dir) + plt.savefig(directory) + if print_flag: + plt.show() + plt.close() + + def on_epoch_end(self, logs=None): + self.solution_print() diff --git a/epde/solver/config.py b/epde/solver/config.py index 5591d71..5534720 100644 --- a/epde/solver/config.py +++ b/epde/solver/config.py @@ -1,3 +1,4 @@ +from email.policy import default from typing import Union, Optional import json diff --git a/epde/solver/data.py b/epde/solver/data.py new file mode 100644 index 0000000..5094e93 --- /dev/null +++ b/epde/solver/data.py @@ -0,0 +1,294 @@ +"""module for working with inerface for initialize grid, conditions and equation""" + +from typing import List, Union +import torch +import numpy as np +import sys +import os + +from epde.solver.device import check_device +from epde.solver.input_preprocessing import EquationMixin + + +def tensor_dtype(dtype: str): + """convert tensor to dtype format + + Args: + dtype (str): dtype + + Returns: + dtype: torch.dtype + """ + if dtype == 'float32': + dtype = torch.float32 + elif dtype == 'float64': + dtype = torch.float64 + elif dtype == 'float16': + dtype = torch.float16 + + return dtype + + +class Domain(): + """class for grid building + """ + def __init__(self, type='uniform'): + self.type = type + self.variable_dict = {} + + @property + def dim(self): + return len(self.variable_dict) + + def variable( + self, + variable_name: str, + variable_set: Union[List, torch.Tensor], + n_points: Union[None, int], + dtype: str = 'float32') -> None: + """ determine varibles for grid building. + + Args: + varible_name (str): varible name. + variable_set (Union[List, torch.Tensor]): [start, stop] list for spatial variable or torch.Tensor with points for variable. + n_points (int): number of points in discretization for variable. + dtype (str, optional): dtype of result vector. Defaults to 'float32'. + + """ + dtype = tensor_dtype(dtype) + + if isinstance(variable_set, torch.Tensor): + variable_tensor = check_device(variable_set) + variable_tensor = variable_set.to(dtype) + self.variable_dict[variable_name] = variable_tensor + else: + if self.type == 'uniform': + n_points = n_points + 1 + start, end = variable_set + variable_tensor = torch.linspace(start, end, n_points, dtype=dtype) + self.variable_dict[variable_name] = variable_tensor + + def build(self, mode: str) -> torch.Tensor: + """ building the grid for algorithm + + Args: + mode (str): mode for equation solution, *mat, autograd, NN* + + Returns: + torch.Tensor: resulting grid. + """ + var_lst = list(self.variable_dict.values()) + var_lst = [i.cpu() for i in var_lst] + if mode in ('autograd', 'NN'): + if len(self.variable_dict) == 1: + grid = check_device(var_lst[0].reshape(-1, 1)) # TODO: verify the correctness of mat method grids generation + else: + grid = check_device(torch.cartesian_prod(*var_lst)) + else: + grid = np.meshgrid(*var_lst, indexing='ij') + grid = check_device(torch.tensor(np.array(grid))) + return grid + + +class Conditions(): + """class for adding the conditions: initial, boundary, and data. 
+ """ + def __init__(self): + self.conditions_lst = [] + + def dirichlet( + self, + bnd: Union[torch.Tensor, dict], + value: Union[callable, torch.Tensor, float], + var: int = 0): + """ determine dirichlet boundary condition. + + Args: + bnd (Union[torch.Tensor, dict]): boundary points can be torch.Tensor + or dict with keys as coordinates names and values as coordinates values. + value (Union[callable, torch.Tensor, float]): values at the boundary (bnd) + if callable: value = function(bnd) + var (int, optional): variable for system case, for single equation is 0. Defaults to 0. + """ + + self.conditions_lst.append({'bnd': bnd, + 'bop': None, + 'bval': value, + 'var': var, + 'type': 'dirichlet'}) + + def operator(self, + bnd: Union[torch.Tensor, dict], + operator: dict, + value: Union[callable, torch.Tensor, float]): + """ determine operator boundary condition + + Args: + bnd (Union[torch.Tensor, dict]): boundary points can be torch.Tensor + or dict with keys as coordinates names and values as coordinates values + operator (dict): dictionary with opertor terms: {'operator name':{coeff, term, pow, var}} + value (Union[callable, torch.Tensor, float]): value on the boundary (bnd). + if callable: value = function(bnd) + """ + try: + var = operator[operator.keys()[0]]['var'] + except: + var = 0 + operator = EquationMixin.equation_unify(operator) + self.conditions_lst.append({'bnd': bnd, + 'bop': operator, + 'bval': value, + 'var': var, + 'type': 'operator'}) + + def periodic(self, + bnd: Union[List[torch.Tensor], List[dict]], + operator: dict = None, + var: int = 0): + """Periodic can be: periodic dirichlet (example u(x,t)=u(-x,t)) + if form with bnd and var for system case. + or periodic operator (example du(x,t)/dx=du(-x,t)/dx) + in form with bnd and operator. + Parameter 'bnd' is list: [b_coord1:torch.Tensor, b_coord2:torch.Tensor,..] or + bnd = [{'x': 1, 't': [0,1]},{'x': -1, 't':[0,1]}] + + Args: + bnd (Union[List[torch.Tensor], List[dict]]): list with dicionaries or torch.Tensors + operator (dict, optional): operator dict. Defaults to None. + var (int, optional): variable for system case and periodic dirichlet. Defaults to 0. + """ + value = torch.tensor([0.]) + if operator is None: + self.conditions_lst.append({'bnd': bnd, + 'bop': operator, + 'bval': value, + 'var': var, + 'type': 'periodic'}) + else: + try: + var = operator[operator.keys()[0]]['var'] + except: + var = 0 + operator = EquationMixin.equation_unify(operator) + self.conditions_lst.append({'bnd': bnd, + 'bop': operator, + 'bval': value, + 'var': var, + 'type': 'periodic'}) + + def data( + self, + bnd: Union[torch.Tensor, dict], + operator: Union[dict, None], + value: torch.Tensor, + var: int = 0): + """ conditions for available solution data + + Args: + bnd (Union[torch.Tensor, dict]): boundary points can be torch.Tensor + or dict with keys as coordinates names and values as coordinates values + operator (Union[dict, None]): dictionary with opertor terms: {'operator name':{coeff, term, pow, var}} + value (Union[torch.Tensor, float]): values at the boundary (bnd) + var (int, optional): variable for system case and periodic dirichlet. Defaults to 0. + """ + if operator is not None: + operator = EquationMixin.equation_unify(operator) + self.conditions_lst.append({'bnd': bnd, + 'bop': operator, + 'bval': value, + 'var': var, + 'type': 'data'}) + + def _bnd_grid(self, + bnd: Union[torch.Tensor, dict], + variable_dict: dict, + dtype) -> torch.Tensor: + """ build subgrid for every condition. 
+
+        Args:
+            bnd (Union[torch.Tensor, dict]): boundary points, can be a torch.Tensor
+                or a dict with coordinate names as keys and coordinate values as values.
+            variable_dict (dict): dictionary with torch.Tensors for each domain variable.
+            dtype (dtype): dtype.
+
+        Returns:
+            torch.Tensor: subgrid for the boundary conditions.
+        """
+
+        dtype = variable_dict[list(variable_dict.keys())[0]].dtype
+
+        if isinstance(bnd, torch.Tensor):
+            bnd_grid = bnd.to(dtype)
+        else:
+            var_lst = []
+            for var in variable_dict.keys():
+                if isinstance(bnd[var], torch.Tensor):
+                    var_lst.append(check_device(bnd[var]).to(dtype))
+                elif isinstance(bnd[var], (float, int)):
+                    var_lst.append(check_device(torch.tensor([bnd[var]])).to(dtype))
+                elif isinstance(bnd[var], list):
+                    lower_bnd = bnd[var][0]
+                    upper_bnd = bnd[var][1]
+                    grid_var = variable_dict[var]
+                    bnd_var = grid_var[(grid_var >= lower_bnd) & (grid_var <= upper_bnd)]
+                    var_lst.append(check_device(bnd_var).to(dtype))
+            bnd_grid = torch.cartesian_prod(*var_lst).to(dtype)
+        if len(bnd_grid.shape) == 1:
+            bnd_grid = bnd_grid.reshape(-1, 1)
+        return bnd_grid
+
+    def build(self,
+              variable_dict: dict) -> List[dict]:
+        """ Preprocessing of the initial boundary data.
+
+        Args:
+            variable_dict (dict): dictionary with torch.Tensors for each domain variable.
+
+        Returns:
+            List[dict]: list with dicts containing all the info about the boundary conditions.
+        """
+        if self.conditions_lst == []:
+            return None
+
+        try:
+            dtype = variable_dict[list(variable_dict.keys())[0]].dtype
+        except:
+            dtype = variable_dict[list(variable_dict.keys())[0]][0].dtype  # if periodic
+
+        for cond in self.conditions_lst:
+            if cond['type'] == 'periodic':
+                cond_lst = []
+                for bnd in cond['bnd']:
+                    cond_lst.append(self._bnd_grid(bnd, variable_dict, dtype))
+                cond['bnd'] = cond_lst
+            else:
+                cond['bnd'] = self._bnd_grid(cond['bnd'], variable_dict, dtype)
+
+            if isinstance(cond['bval'], torch.Tensor):
+                cond['bval'] = check_device(cond['bval']).to(dtype)
+            elif isinstance(cond['bval'], (float, int)):
+                cond['bval'] = check_device(
+                    torch.ones_like(cond['bnd'][:,0])*cond['bval']).to(dtype)
+            elif callable(cond['bval']):
+                cond['bval'] = check_device(cond['bval'](cond['bnd'])).to(dtype)
+
+        return self.conditions_lst
+
+
+class Equation():
+    """Class for adding an equation.
+    """
+    def __init__(self):
+        self.equation_lst = []
+
+    @property
+    def num(self):
+        return len(self.equation_lst)
+
+    def add(self, eq: dict):
+        """ Add an equation.
+
+        Args:
+            eq (dict): equation in operator form.
+        """
+        self.equation_lst.append(eq)
diff --git a/epde/solver/derivative.py b/epde/solver/derivative.py
index 07bcbe5..a22126f 100644
--- a/epde/solver/derivative.py
+++ b/epde/solver/derivative.py
@@ -1,11 +1,17 @@
-import torch
-from typing import Any, Union, List
+"""Module of derivative calculations.
+"""
+
+from typing import Any, Union, List, Tuple, Callable
 import numpy as np
 from scipy import linalg
+import torch
 
 
 class DerivativeInt():
+    """Interface class.
+    """
     def take_derivative(self, value):
+        """Method that should be built in every child class."""
         raise NotImplementedError
@@ -22,29 +28,55 @@ def __init__(self, model: Any):
         self.model = model
 
     def take_derivative(self, term: Union[list, int, torch.Tensor], *args) -> torch.Tensor:
-        """
-        Auxiliary function serves for single differential operator resulting field
+        """ Auxiliary function serves for single differential operator resulting field
         derivation.
+
         Args:
-            term: differential operator in conventional form.
+            term (Union[list, int, torch.Tensor]): differential operator in conventional form.
         Returns:
-            resulting field, computed on a grid.
+            torch.Tensor: resulting field, computed on a grid.
         """
         dif_dir = list(term.keys())[1]
-        if type(term['coeff']) is tuple:
+        if isinstance(term['coeff'], tuple):
             coeff = term['coeff'][0](term['coeff'][1]).reshape(-1, 1)
         else:
            coeff = term['coeff']
 
         der_term = 1.
         for j, scheme in enumerate(term[dif_dir][0]):
-            grid_sum = 0.
-            for k, grid in enumerate(scheme):
-                grid_sum += self.model(grid)[:, term['var'][j]].reshape(-1, 1)\
-                    * term[dif_dir][1][j][k]
-            print(f'grid_sum in take_derivatives is {grid_sum.shape} with the grid of {grid.shape}')
-            der_term = der_term * grid_sum ** term['pow'][j]
+            if isinstance(term['var'][j], (list, tuple)):
+                raise NotImplementedError('Support for multivariate function tokens was introduced only for autograd.')
+                if not isinstance(term['pow'][j], (Callable, torch.nn.Sequential)):
+                    raise ValueError('Multivariate function can not be passed as a simple power func.')
+                der_args = []
+                for var_idx, cur_var in enumerate(term['var'][j]):
+                    grid_sum = 0.
+                    for k, grid in enumerate(scheme):
+                        grid_sum += self.model(grid)[:, cur_var].reshape(-1, 1)\
+                            * term[dif_dir][1][j][k]
+                    der_args.append(grid_sum)
+
+                # if derivative[var_idx] == [None]:
+                #     der_args.append(self.model(grid_points)[:, cur_var].reshape(-1, 1))
+                # else:
+                #     der_args.append(self._nn_autograd(self.model, grid_points, cur_var, axis=derivative[var_idx]))
+                if isinstance(term['pow'][j], torch.nn.Sequential):
+                    der_args = torch.cat(der_args, dim=1)
+                    factor_val = term['pow'][j](der_args)
+                else:
+                    factor_val = term['pow'][j](*der_args)
+                der_term = der_term * factor_val
+            else:
+                grid_sum = 0.
+                for k, grid in enumerate(scheme):
+                    grid_sum += self.model(grid)[:, term['var'][j]].reshape(-1, 1)\
+                        * term[dif_dir][1][j][k]
+
+                if isinstance(term['pow'][j], (int, float)):
+                    der_term = der_term * grid_sum ** term['pow'][j]
+                elif isinstance(term['pow'][j], Callable):
+                    der_term = der_term * term['pow'][j](grid_sum)
         der_term = coeff * der_term
 
         return der_term
@@ -55,25 +87,34 @@ class Derivative_autograd(DerivativeInt):
     """
     Taking numerical derivative for 'autograd' method.
     """
 
-    def __init__(self, model):
+    def __init__(self, model: torch.nn.Module):
+        """
+        Args:
+            model (torch.nn.Module): model of *autograd* mode.
+        """
         self.model = model
 
     @staticmethod
-    def nn_autograd(model, points, var, axis=[0]):
-        """
-        Computes derivative on the grid using autograd method.
+    def _nn_autograd(model: torch.nn.Module,
+                     points: torch.Tensor,
+                     var: int,
+                     axis: List[int] = [0]):
+        """ Computes derivative on the grid using the autograd method.
+
         Args:
-            model: neural network.
-            points: points, where numerical derivative is calculated.
-            axis: term of differentiation, example [0,0]->d2/dx2
-            if grid_points(x,y)
+            model (torch.nn.Module): torch neural network.
+            points (torch.Tensor): points where the numerical derivative is calculated.
+            var (int): index of the dependent variable (for a single equation it is *0*).
+            axis (list, optional): term of differentiation, example [0,0]->d2/dx2
+                if grid_points(x,y). Defaults to [0].
+
         Returns:
-            the result of desired function differentiation
+            gradient_full (torch.Tensor): the result of desired function differentiation
             in corresponding axis.
- """ points.requires_grad = True + fi = model(points)[:, var].sum(0) for ax in axis: grads, = torch.autograd.grad(fi, points, create_graph=True) @@ -82,16 +123,16 @@ def nn_autograd(model, points, var, axis=[0]): return gradient_full def take_derivative(self, term: dict, grid_points: torch.Tensor) -> torch.Tensor: - """ - Auxiliary function serves for single differential operator resulting field + """ Auxiliary function serves for single differential operator resulting field derivation. + Args: - term: differential operator in conventional form. - grid_points: points, where numerical derivative is calculated. + term (dict): differential operator in conventional form. + grid_points (torch.Tensor): points, where numerical derivative is calculated. + Returns: - resulting field, computed on a grid. + der_term (torch.Tensor): resulting field, computed on a grid. """ - dif_dir = list(term.keys())[1] # it is may be int, function of grid or torch.Tensor if callable(term['coeff']): @@ -99,16 +140,33 @@ def take_derivative(self, term: dict, grid_points: torch.Tensor) -> torch.Tenso else: coeff = term['coeff'] - der_term = 1. + der_term = 1. # TODO: Переписать! for j, derivative in enumerate(term[dif_dir]): - if derivative == [None]: - der = self.model(grid_points)[:, term['var'][j]].reshape(-1, 1) + if (isinstance(term['pow'][j], torch.nn.Sequential) or + (isinstance(term['pow'][j], Callable) and isinstance(term['var'][j], (list, tuple)))): #isinstance(term['var'][j], (list, tuple)): + der_args = [] + iter_arg = term['var'][j] if isinstance(term['var'][j], (list, tuple)) else [term['var'][j],] + for var_idx, cur_var in enumerate(iter_arg): + if derivative[var_idx] == [None] or derivative[var_idx] is None: + der_args.append(self.model(grid_points)[:, cur_var].reshape(-1, 1)) + else: + der_args.append(self._nn_autograd(self.model, grid_points, cur_var, axis=derivative[var_idx])) + if isinstance(term['pow'][j], torch.nn.Sequential): + der_args = torch.cat(der_args, dim=1) + factor_val = term['pow'][j](der_args) + else: + factor_val = term['pow'][j](*der_args) + der_term = der_term * factor_val else: - der = self.nn_autograd( - self.model, grid_points, term['var'][j], axis=derivative) - der_term = der_term * der ** term['pow'][j] + if derivative == [None] or derivative is None: + der = self.model(grid_points)[:, term['var'][j]].reshape(-1, 1) + else: + der = self._nn_autograd(self.model, grid_points, term['var'][j], axis=derivative) + if isinstance(term['pow'][j],(int,float)): + der_term = der_term * der ** term['pow'][j] + elif isinstance(term['pow'][j], Callable): + der_term = der_term * term['pow'][j](der) der_term = coeff * der_term - return der_term @@ -116,32 +174,52 @@ class Derivative_mat(DerivativeInt): """ Taking numerical derivative for 'mat' method. """ - def __init__(self, model, derivative_points): + def __init__(self, model: torch.Tensor, derivative_points: int): """ Args: - model: random matrix. + model (torch.Tensor): model of *mat* mode. + derivative_points (int): points number for derivative calculation. 
""" self.model = model - self.backward, self.farward = Derivative_mat.labels(derivative_points) + self.backward, self.farward = Derivative_mat._labels(derivative_points) - self.alpha_backward = Derivative_mat.linear_system(self.backward) - self.alpha_farward = Derivative_mat.linear_system(self.farward) + self.alpha_backward = Derivative_mat._linear_system(self.backward) + self.alpha_farward = Derivative_mat._linear_system(self.farward) - num_points = int(len(self.backward)-1) + num_points = int(len(self.backward) - 1) - self.back = [int(0-i) for i in range(1,num_points+1)] + self.back = [int(0 - i) for i in range(1, num_points + 1)] self.farw = [int(i) for i in range(num_points)] - @staticmethod - def labels(num_points): - labels_backward = [i for i in range(-num_points+1,1)] - labels_farward = [i for i in range(num_points)] + def _labels(derivative_points: int) -> Tuple[List, List]: + """ Determine which points are used in derivative calc-n. + If derivative_points = 2, it return ([-1, 0], [0, 1]) + + Args: + derivative_points (int): points number for derivative calculation. + + Returns: + labels_backward (list): points labels for backward scheme. + labels_forward (list): points labels for forward scheme. + """ + labels_backward = list(i for i in range(-derivative_points + 1, 1)) + labels_farward = list(i for i in range(derivative_points)) return labels_backward, labels_farward @staticmethod - def linear_system(labels): + def _linear_system(labels: list) -> np.ndarray: + """ To caclulate coeeficints in numerical scheme, + we have to solve the linear system of algebraic equations. + A*alpha=b + + Args: + labels (list): points labels for backward/foraward scheme. + + Returns: + alpha (np.ndarray): coefficints for numerical scheme. + """ points_num = len(labels) # num_points=number of equations labels = np.array(labels) A = [] @@ -156,28 +234,31 @@ def linear_system(labels): return alpha - def derivative_1d(self, u_tensor: torch.Tensor, h: torch.Tensor) -> torch.Tensor: - """ - Computes derivative in one dimension for matrix method. + def _derivative_1d(self, u_tensor: torch.Tensor, h: torch.Tensor) -> torch.Tensor: + """ Computes derivative in one dimension for matrix method. + Args: - model: random matrix. - grid: array of a n-D points. + u_tensor (torch.Tensor): dependenet varible of equation, + some part of model. + h (torch.Tensor): increment of numerical scheme. + Returns: - computed derivative along one dimension. + du (torch.Tensor): computed derivative along one dimension. """ + shape = u_tensor.shape u_tensor = u_tensor.reshape(-1) du_back = 0 du_farw = 0 - i=0 + i = 0 for shift_b, shift_f in zip(self.backward, self.farward): - du_back += torch.roll(u_tensor, -shift_b)*self.alpha_backward[i] - du_farw += torch.roll(u_tensor, -shift_f)*self.alpha_farward[i] + du_back += torch.roll(u_tensor, -shift_b) * self.alpha_backward[i] + du_farw += torch.roll(u_tensor, -shift_f) * self.alpha_farward[i] i += 1 - du = (du_back+du_farw)/(2*h) - du[self.back] = du_back[self.back]/h - du[self.farw] = du_farw[self.farw]/h + du = (du_back + du_farw) / (2 * h) + du[self.back] = du_back[self.back] / h + du[self.farw] = du_farw[self.farw] / h du = du.reshape(shape) @@ -185,31 +266,44 @@ def derivative_1d(self, u_tensor: torch.Tensor, h: torch.Tensor) -> torch.Tensor return du - def step_h(self, h_tensor: torch.Tensor) -> List[torch.Tensor]: + def _step_h(self, h_tensor: torch.Tensor) -> List[torch.Tensor]: + """ Calculate increment along each axis of the grid. 
+
+        Args:
+            h_tensor (torch.Tensor): grid of *mat* mode.
+
+        Returns:
+            h (List[torch.Tensor]): list with the increments
+                along each axis of the grid.
+        """
        h = []
-        NN_grid = torch.vstack([h_tensor[i].reshape(-1) for i in \
+        nn_grid = torch.vstack([h_tensor[i].reshape(-1) for i in \
                                 range(h_tensor.shape[0])]).T.float()
-
-        for i in range(NN_grid.shape[-1]):
-            axis_points = torch.unique(NN_grid[:,i])
+
+        for i in range(nn_grid.shape[-1]):
+            axis_points = torch.unique(nn_grid[:,i])
             h.append(abs(axis_points[1]-axis_points[0]))
         return h
 
-    def derivative(self, u_tensor: torch.Tensor, h, axis: int) -> torch.Tensor:
-        """
-        Computing derivative for 'matrix' method.
+    def _derivative(self,
+                    u_tensor: torch.Tensor,
+                    h: torch.Tensor,
+                    axis: int) -> torch.Tensor:
+        """ Computes the derivative for the 'mat' method.
+
         Args:
-            u_tensor: smth.
-            h_tensor: smth.
-            axis: axis along which the derivative is calculated.
-            scheme_order: accuracy inner order for finite difference. Default = 1
-            boundary_order: accuracy boundary order for finite difference. Default = 2
+            u_tensor (torch.Tensor): dependent variable of the equation,
+                some part of the model.
+            h (torch.Tensor): increment of the numerical scheme.
+            axis (int): axis along which the derivative is calculated.
+
         Returns:
-            computed derivative.
+            du (torch.Tensor): computed derivative.
         """
+
         if len(u_tensor.shape)==1 or u_tensor.shape[0]==1:
-            du = self.derivative_1d(u_tensor, h)
+            du = self._derivative_1d(u_tensor, h)
             return du
 
         pos = len(u_tensor.shape) - 1
@@ -218,16 +312,16 @@ def derivative(self, u_tensor: torch.Tensor, h, axis: int) -> torch.Tensor:
         du_back = 0
         du_farw = 0
-        i=0
+        i = 0
         for shift_b, shift_f in zip(self.backward, self.farward):
-            du_back += torch.roll(u_tensor, -shift_b)*self.alpha_backward[i]
-            du_farw += torch.roll(u_tensor, -shift_f)*self.alpha_farward[i]
+            du_back += torch.roll(u_tensor, -shift_b) * self.alpha_backward[i]
+            du_farw += torch.roll(u_tensor, -shift_f) * self.alpha_farward[i]
             i += 1
-        du = (du_back+du_farw)/(2*h)
-
+        du = (du_back + du_farw) / (2 * h)
+
         if pos == 1:
-            du[:,self.back] = du_back[:,self.back]/h
-            du[:, self.farw] = du_farw[:, self.farw]/h
+            du[:,self.back] = du_back[:,self.back] / h
+            du[:, self.farw] = du_farw[:, self.farw] / h
         elif pos == 2:
             du[:,:, self.back] = du_back[:,:, self.back] / h
             du[:,:, self.farw] = du_farw[:,:, self.farw] / h
@@ -236,15 +330,16 @@ def derivative(self, u_tensor: torch.Tensor, h, axis: int) -> torch.Tensor:
 
         return du
 
-    def take_derivative(self, term: Any, grid_points: torch.Tensor) -> torch.Tensor:
-        """
-        Auxiliary function serves for single differential operator resulting field
+    def take_derivative(self, term: torch.Tensor, grid_points: torch.Tensor) -> torch.Tensor:
+        """ Auxiliary function serves for single differential operator resulting field
         derivation.
+
         Args:
-            term: differential operator in conventional form.
-            grid_points: grid points
+            term (torch.Tensor): differential operator in conventional form.
+            grid_points (torch.Tensor): grid points.
+
         Returns:
-            resulting field, computed on a grid.
+            der_term (torch.Tensor): resulting field, computed on a grid.
""" dif_dir = list(term.keys())[1] @@ -255,8 +350,8 @@ def take_derivative(self, term: Any, grid_points: torch.Tensor) -> torch.Tensor: for axis in scheme: if axis is None: continue - h = self.step_h(grid_points)[axis] - prod = self.derivative(prod, h, axis) + h = self._step_h(grid_points)[axis] + prod = self._derivative(prod, h, axis) der_term = der_term * prod ** term['pow'][j] if callable(term['coeff']) is True: der_term = term['coeff'](grid_points) * der_term @@ -267,19 +362,27 @@ def take_derivative(self, term: Any, grid_points: torch.Tensor) -> torch.Tensor: class Derivative(): """ - Interface for taking numerical derivative due to chosen calculation method. + Interface for taking numerical derivative due to chosen calculation mode. """ - def __init__(self, model, derivative_points): - """ - Args: - model: neural network or matrix depending on the selected mode. + def __init__(self, + model: Union[torch.nn.Module, torch.Tensor], + derivative_points: int): + """_summary_ + Args: + model (Union[torch.nn.Module, torch.Tensor]): neural network or + matrix depending on the selected mode. + derivative_points (int): points number for derivative calculation. + If derivative_points=2, numerical scheme will be ([-1,0],[0,1]), + parameter determine number of poins in each forward and backward scheme. """ + self.model = model self.derivative_points = derivative_points - def set_strategy(self, strategy: str) -> Union[Derivative_NN, Derivative_autograd, Derivative_mat]: + def set_strategy(self, + strategy: str) -> Union[Derivative_NN, Derivative_autograd, Derivative_mat]: """ Setting the calculation method. Args: diff --git a/epde/solver/device.py b/epde/solver/device.py index 280f6c5..e9f3db4 100644 --- a/epde/solver/device.py +++ b/epde/solver/device.py @@ -1,17 +1,41 @@ +"""Module for working with device mode""" + +from typing import Any import torch -def solver_device(device): +verbose = False + +def solver_device(device: str): + """ Corresponding to chosen device, all futher + created tensors will be with the same device + + Args: + device (str): device mode, **cuda, gpu, cpu*. + + """ if device in ['cuda','gpu'] and torch.cuda.is_available(): - print('CUDA is available and used.') + if verbose: + print('CUDA is available and used.') return torch.set_default_device('cuda') elif device in ['cuda','gpu'] and not torch.cuda.is_available(): - print('CUDA is not available, cpu is used!') + if verbose: + print('CUDA is not available, cpu is used!') return torch.set_default_device('cpu') else: - print('Default cpu processor is used.') + if verbose: + print('Default cpu processor is used.') return torch.set_default_device('cpu') -def check_device(data): +def check_device(data: Any): + """ checking the device of the data. + If the data.device is not same with torch.set_default_device, + change one. + Args: + data (Any): it could be model or torch.Tensors + + Returns: + data (Any): data with correct device + """ device = torch.tensor([0.]).device.type if data.device.type != device: return data.to(device) @@ -19,4 +43,6 @@ def check_device(data): return data def device_type(): + """ Return the default device. 
+ """ return torch.tensor([0.]).device.type \ No newline at end of file diff --git a/epde/solver/eval.py b/epde/solver/eval.py index abb9177..09cd028 100644 --- a/epde/solver/eval.py +++ b/epde/solver/eval.py @@ -1,26 +1,29 @@ -import torch +"""Module for operatoins with operator and boundaru con-ns.""" + from typing import Tuple, Union, List +import torch from epde.solver.points_type import Points_type from epde.solver.derivative import Derivative from epde.solver.device import device_type, check_device from epde.solver.utils import PadTransform +from torch.utils.data import DataLoader -def integration(func: torch.tensor, grid, pow: Union[int, float] = 2) \ +def integration(func: torch.Tensor, + grid: torch.Tensor, + power: int = 2) \ -> Union[Tuple[float, float], Tuple[list, torch.Tensor]]: - """ - Function realize 1-space integrands, + """ Function realize 1-space integrands, where func=(L(u)-f)*weak_form subintegrands function and definite integral parameter is grid. Args: - func: operator multiplied on test function - grid: array of a n-D points. - pow: string (sqr ar abs) power of func points + func (torch.Tensor): operator multiplied on test function + grid (torch.Tensor): array of a n-D points. + power (int, optional): power of func points. Defults to 2. Returns: - tuple(result, grid) 'result' is integration result through one grid axis 'grid' is initial grid without last column or zero (if grid.shape[N,1]) """ @@ -31,52 +34,52 @@ def integration(func: torch.tensor, grid, pow: Union[int, float] = 2) \ marker = grid[0][column] index = [0] result = [] - U = 0. + u = 0. for i in range(1, len(grid)): if grid[i][column] == marker or column == -1: - U += (grid[i][-1] - grid[i - 1][-1]).item() * \ - (func[i] ** pow + func[i - 1] ** pow) / 2 + u += (grid[i][-1] - grid[i - 1][-1]).item() * \ + (func[i] ** power + func[i - 1] ** power) / 2 else: - result.append(U) + result.append(u) marker = grid[i][column] index.append(i) - U = 0. + u = 0. if column == -1: - return U, 0. + return u, 0. else: - result.append(U) + result.append(u) grid = grid[index, :-1] return result, grid -def dict_to_matrix(bval: dict, true_bval: dict): - '''Function for bounaries values matrix creation from dictionary +def dict_to_matrix(bval: dict, true_bval: dict)\ + -> Tuple[torch.Tensor, torch.Tensor, List, List]: + """ Function for bounaries values matrix creation from dictionary. Args: - bval: dictionary with predicted boundaries values, - where keys are boundaries types - true_bval: dictionary with true boundaries values, - where keys are boundaries types + bval (dict): dictionary with predicted boundaries values, + where keys are boundaries types. + true_bval (dict): dictionary with true boundaries values, + where keys are boundaries types. Returns: - tuple(matrix_bval, matrix_true_bval, keys, len_list) - 'matrix_bval' matrix, where each column is predicted - boundary values of one boundary type - 'matrix_true_bval' matrix, where each column is true - boundary values of one boundary type - 'keys' boundary types list corresponding matrix_bval columns - 'len_list' list of length of each boundary type column - ''' + matrix_bval (torch.Tensor): matrix, where each column is predicted + boundary values of one boundary type. + matrix_true_bval (torch.Tensor):matrix, where each column is true + boundary values of one boundary type. + keys (list): boundary types list corresponding matrix_bval columns. + len_list (list): list of length of each boundary type column. 
+ """ keys = list(bval.keys()) max_len = max([len(i) for i in bval.values()]) pad = PadTransform(max_len, 0) - matrix_bval = pad(bval[keys[0]]).float().reshape(-1,1) - matrix_true_bval = pad(true_bval[keys[0]]).float().reshape(-1,1) + matrix_bval = pad(bval[keys[0]]).reshape(-1,1) + matrix_true_bval = pad(true_bval[keys[0]]).reshape(-1,1) len_list = [len(bval[keys[0]])] for key in keys[1:]: - bval_i = pad(bval[key]).float().reshape(-1,1) - true_bval_i = pad(true_bval[key]).float().reshape(-1,1) + bval_i = pad(bval[key]).reshape(-1,1) + true_bval_i = pad(true_bval[key]).reshape(-1,1) matrix_bval = torch.hstack((matrix_bval, bval_i)) matrix_true_bval = torch.hstack((matrix_true_bval, true_bval_i)) len_list.append(len(bval[key])) @@ -88,9 +91,25 @@ class Operator(): """ Class for differential equation calculation. """ - def __init__(self, grid: torch.Tensor, prepared_operator: Union[list,dict], - model: Union[torch.nn.Sequential, torch.Tensor], mode: str, - weak_form: List[callable], derivative_points: int): + def __init__(self, + grid: torch.Tensor, + prepared_operator: Union[list,dict], + model: Union[torch.nn.Sequential, torch.Tensor], + mode: str, + weak_form: list[callable], + derivative_points: int, + batch_size: int = None): + """ + Args: + grid (torch.Tensor): grid (domain discretization). + prepared_operator (Union[list,dict]): prepared (after Equation class) operator. + model (Union[torch.nn.Sequential, torch.Tensor]): *mat or NN or autograd* model. + mode (str): *mat or NN or autograd* + weak_form (List[callable]): list with basis functions (if the form is *weak*). + derivative_points (int): points number for derivative calculation. + For details to Derivative_mat class. + batch_size (int): size of batch. + """ self.grid = check_device(grid) self.prepared_operator = prepared_operator self.model = model.to(device_type()) @@ -100,74 +119,101 @@ def __init__(self, grid: torch.Tensor, prepared_operator: Union[list,dict], if self.mode == 'NN': self.grid_dict = Points_type(self.grid).grid_sort() self.sorted_grid = torch.cat(list(self.grid_dict.values())) - elif self.mode == 'autograd' or self.mode == 'mat': + elif self.mode in ('autograd', 'mat'): self.sorted_grid = self.grid + self.batch_size = batch_size + if self.batch_size is not None: + self.grid_loader = DataLoader(self.sorted_grid, batch_size=self.batch_size, shuffle=True, + generator=torch.Generator(device=device_type())) + self.n_batches = len(self.grid_loader) + del self.sorted_grid + torch.cuda.empty_cache() + self.init_mini_batches() + self.current_batch_i = 0 + self.derivative = Derivative(self.model, + self.derivative_points).set_strategy(self.mode).take_derivative + + def init_mini_batches(self): + """ Initialization of batch iterator. - def apply_operator(self, operator: list, grid_points: Union[torch.Tensor, None]) -> torch.Tensor: """ - Deciphers equation in a single grid subset to a field. + self.grid_iter = iter(self.grid_loader) + self.grid_batch = next(self.grid_iter) + + def apply_operator(self, + operator: list, + grid_points: Union[torch.Tensor, None]) -> torch.Tensor: + """ Deciphers equation in a single grid subset to a field. Args: - operator: single (len(subset)==1) operator in input form. See + operator (list): prepared (after Equation class) operator. See input_preprocessing.operator_prepare() - grid_points: Points, where numerical derivative is calculated. **Uses only in 'autograd' and 'mat' modes.** + grid_points (Union[torch.Tensor, None]): Points, where numerical + derivative is calculated. 
**Uses only in 'autograd' and 'mat' modes.** + Returns: - Decoded operator on a single grid subset + total (torch.Tensor): Decoded operator on a single grid subset. """ - derivative = Derivative(self.model,self.derivative_points).set_strategy(self.mode).take_derivative + for term in operator: term = operator[term] - dif = derivative(term, grid_points) + dif = self.derivative(term, grid_points) try: total += dif except NameError: total = dif return total - def pde_compute(self) -> torch.Tensor: - """ - Computes PDE residual. + def _pde_compute(self) -> torch.Tensor: + """ Computes PDE residual. Returns: - PDE residual. + torch.Tensor: PDE (or ODE) residual. """ + if self.batch_size is not None: + sorted_grid = self.grid_batch + try: + self.grid_batch = next(self.grid_iter) + except StopIteration: # no batches left, reinitialize + self.init_mini_batches() + self.current_batch_i = -1 + else: + sorted_grid = self.sorted_grid num_of_eq = len(self.prepared_operator) if num_of_eq == 1: op = self.apply_operator( - self.prepared_operator[0], self.sorted_grid).reshape(-1,1) + self.prepared_operator[0], sorted_grid).reshape(-1,1) else: op_list = [] for i in range(num_of_eq): op_list.append(self.apply_operator( - self.prepared_operator[i], self.sorted_grid).reshape(-1,1)) + self.prepared_operator[i], sorted_grid).reshape(-1,1)) op = torch.cat(op_list, 1) return op - def weak_pde_compute(self, weak_form) -> torch.Tensor: - """ - Computes PDE residual in weak form. + def _weak_pde_compute(self) -> torch.Tensor: + """ Computes PDE residual in weak form. - Args: - weak_form: list of basis functions Returns: - weak PDE residual. + torch.Tensor: weak PDE residual. """ + device = device_type() if self.mode == 'NN': grid_central = self.grid_dict['central'] elif self.mode == 'autograd': grid_central = self.grid - op = self.pde_compute() + op = self._pde_compute() sol_list = [] for i in range(op.shape[-1]): sol = op[:, i] - for func in weak_form: + for func in self.weak_form: sol = sol * func(grid_central).to(device).reshape(-1) grid_central1 = torch.clone(grid_central) - for k in range(grid_central.shape[-1]): + for _ in range(grid_central.shape[-1]): sol, grid_central1 = integration(sol, grid_central1) sol_list.append(sol.reshape(-1, 1)) if len(sol_list) == 1: @@ -176,151 +222,192 @@ def weak_pde_compute(self, weak_form) -> torch.Tensor: return torch.cat(sol_list).reshape(1,-1) def operator_compute(self): - if self.weak_form == None or self.weak_form == []: - return self.pde_compute() + """ Calculates the operator residual according to its form (weak or strong). + + Returns: + torch.Tensor: operator residual. + """ + if self.weak_form is None or self.weak_form == []: + return self._pde_compute() else: - return self.weak_pde_compute(self.weak_form) + return self._weak_pde_compute() class Bounds(): """ Class for boundary and initial conditions calculation. """ - def __init__(self, grid: torch.Tensor, prepared_bconds: Union[list,dict], - model: Union[torch.nn.Sequential, torch.Tensor], mode: str, - weak_form: List[callable], derivative_points: int): + def __init__(self, + grid: torch.Tensor, + prepared_bconds: Union[list, dict], + model: Union[torch.nn.Sequential, torch.Tensor], + mode: str, + weak_form: List[callable], + derivative_points: int): + """ Initializes the boundary and initial conditions calculation. + + Args: + grid (torch.Tensor): grid (domain discretization). + prepared_bconds (Union[list,dict]): prepared (after Equation class) boundary conditions. + model (Union[torch.nn.Sequential, torch.Tensor]): *mat or NN or autograd* model.
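The `try/except NameError` accumulation in `apply_operator` above can be read as a plain sum over operator terms. A minimal editor's sketch of the same idea (not the class API; `term_values` stands in for the per-term derivative results):

```python
import torch

# Each entry stands in for self.derivative(term, grid_points): summing the
# per-term fields yields the decoded operator, without the NameError trick.
def sum_operator_terms(term_values):
    total = torch.zeros_like(term_values[0])
    for dif in term_values:
        total = total + dif
    return total

print(sum_operator_terms([torch.ones(3), 2 * torch.ones(3)]))  # tensor([3., 3., 3.])
```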
+ mode (str): *mat or NN or autograd* + weak_form (List[callable]): list with basis functions (if the form is *weak*). + derivative_points (int): points number for derivative calculation. + For details see the Derivative_mat class. + """ self.grid = check_device(grid) self.prepared_bconds = prepared_bconds self.model = model.to(device_type()) self.mode = mode - self.apply_operator = Operator(self.grid, self.prepared_bconds, + self.operator = Operator(self.grid, self.prepared_bconds, self.model, self.mode, weak_form, - derivative_points).apply_operator + derivative_points) + + def _apply_bconds_set(self, operator_set: list) -> torch.Tensor: + """ Method only for the *NN* mode. Calculates boundary conditions with derivatives + to be used in the _apply_neumann method. - def apply_bconds_set(self, operator_set: list) -> torch.Tensor: - """ - Deciphers equation in a whole grid to a field. Args: - operator_set: Multiple (len(subset)>=1) operators in input form. See - input_preprocessing.operator_prepare(). + operator_set (list): list with prepared (after Equation_NN class) boundary operators. + For details see the Equation_NN.operator_prepare method. + Returns: - Decoded boundary operator on the whole grid. + torch.Tensor: Decoded boundary operator on the whole grid. """ + field_part = [] for operator in operator_set: - field_part.append(self.apply_operator(operator, None)) + field_part.append(self.operator.apply_operator(operator, None)) field_part = torch.cat(field_part) return field_part - def apply_dirichlet(self, bnd: torch.Tensor, var: int) -> torch.Tensor: - """ - Applies Dirichlet boundary conditions. + def _apply_dirichlet(self, bnd: torch.Tensor, var: int) -> torch.Tensor: + """ Applies Dirichlet boundary conditions. Args: - bnd: terms of prepared boundary conditions (see input_preprocessing.bnd_prepare) in input form. - var: indicates for which equation it is necessary to apply the boundary condition. + bnd (torch.Tensor): terms (boundary points) of prepared boundary conditions. + For more details see input_preprocessing (bnd_prepare method). + var (int): indicates for which dependent variable it is necessary to apply + the boundary condition. For a single equation it is 0. + Returns: - calculated boundary condition. + torch.Tensor: calculated boundary condition. """ + if self.mode == 'NN' or self.mode == 'autograd': b_op_val = self.model(bnd)[:, var].reshape(-1, 1) elif self.mode == 'mat': b_op_val = [] for position in bnd: - b_op_val.append(self.model[var][position]) + b_op_val.append(self.model[var][position]) b_op_val = torch.cat(b_op_val).reshape(-1, 1) return b_op_val - def apply_neumann(self, bnd: torch.Tensor, bop: list) -> torch.Tensor: - """ - Applies periodic boundary conditions. + def _apply_neumann(self, bnd: torch.Tensor, bop: list) -> torch.Tensor: + """ Applies boundary conditions with derivative operators. Args: - bnd: terms of prepared boundary conditions (see input_preprocessing.bnd_prepare) in input form. - bop: terms of operator on boundary. + bnd (torch.Tensor): terms (boundary points) of prepared boundary conditions. + bop (list): terms of prepared boundary derivative operator. + Returns: - calculated boundary condition. + torch.Tensor: calculated boundary condition.
""" + if self.mode == 'NN': - b_op_val = self.apply_bconds_set(bop) + b_op_val = self._apply_bconds_set(bop) elif self.mode == 'autograd': - b_op_val = self.apply_operator(bop, bnd) + b_op_val = self.operator.apply_operator(bop, bnd) elif self.mode == 'mat': var = bop[list(bop.keys())[0]]['var'][0] - b_op_val = self.apply_operator(bop, self.grid) + b_op_val = self.operator.apply_operator(bop, self.grid) b_val = [] for position in bnd: b_val.append(b_op_val[var][position]) b_op_val = torch.cat(b_val).reshape(-1, 1) return b_op_val - def apply_periodic(self, bnd: torch.Tensor, bop: list, var: int) -> torch.Tensor: - """ - Applies periodic boundary conditions. + def _apply_periodic(self, bnd: torch.Tensor, bop: list, var: int) -> torch.Tensor: + """ Applies periodic boundary conditions. Args: - bnd: terms of prepared boundary conditions (see input_preprocessing.bnd_prepare) in input form. - bop: terms of operator on boundary. - var: indicates for which equation it is necessary to apply the boundary condition. + bnd (torch.Tensor): terms (boundary points) of prepared boundary conditions. + bop (list): terms of prepared boundary derivative operator. + var (int): indicates for which dependent variable it is necessary to apply + the boundary condition. For single equation is 0. Returns: - calculated boundary condition + torch.Tensor: calculated boundary condition """ if bop is None: - b_op_val = self.apply_dirichlet(bnd[0], var).reshape(-1, 1) + b_op_val = self._apply_dirichlet(bnd[0], var).reshape(-1, 1) for i in range(1, len(bnd)): - b_op_val -= self.apply_dirichlet(bnd[i], var).reshape(-1, 1) + b_op_val -= self._apply_dirichlet(bnd[i], var).reshape(-1, 1) else: if self.mode == 'NN': - b_op_val = self.apply_neumann(bnd, bop[0]).reshape(-1, 1) + b_op_val = self._apply_neumann(bnd, bop[0]).reshape(-1, 1) for i in range(1, len(bop)): - b_op_val -= self.apply_neumann(bnd, bop[i]).reshape(-1, 1) - elif self.mode == 'autograd' or self.mode == 'mat': - b_op_val = self.apply_neumann(bnd[0], bop).reshape(-1, 1) + b_op_val -= self._apply_neumann(bnd, bop[i]).reshape(-1, 1) + elif self.mode in ('autograd', 'mat'): + b_op_val = self._apply_neumann(bnd[0], bop).reshape(-1, 1) for i in range(1, len(bnd)): - b_op_val -= self.apply_neumann(bnd[i], bop).reshape(-1, 1) + b_op_val -= self._apply_neumann(bnd[i], bop).reshape(-1, 1) return b_op_val - def apply_data(self, bnd: torch.Tensor, bop: list, var: int) -> torch.Tensor: - '''method for applying data''' + def _apply_data(self, bnd: torch.Tensor, bop: list, var: int) -> torch.Tensor: + """ Method for applying known data about solution. + + Args: + bnd (torch.Tensor): terms (data points) of prepared boundary conditions. + bop (list): terms of prepared data derivative operator. + var (int): indicates for which dependent variable it is necessary to apply + the data condition. For single equation is 0. + + Returns: + torch.Tensor: calculated data condition. + """ if bop is None: - b_op_val = self.apply_dirichlet(bnd, var).reshape(-1, 1) + b_op_val = self._apply_dirichlet(bnd, var).reshape(-1, 1) else: - b_op_val = self.apply_neumann(bnd, bop).reshape(-1, 1) + b_op_val = self._apply_neumann(bnd, bop).reshape(-1, 1) return b_op_val - def b_op_val_calc(self, bcond) -> torch.Tensor: - """ - Auxiliary function. Serves only to evaluate operator on the boundary. + def b_op_val_calc(self, bcond: dict) -> torch.Tensor: + """ Auxiliary function. Serves only to choose *type* of the condition and evaluate one. 
Args: - bcond: terms of prepared boundary conditions (see input_preprocessing.bnd_prepare) in input form. + bcond (dict): terms of prepared boundary conditions + (see input_preprocessing module -> bnd_prepare method). + Returns: - calculated operator on the boundary. + torch.Tensor: calculated operator on the boundary. """ + if bcond['type'] == 'dirichlet': - b_op_val = self.apply_dirichlet(bcond['bnd'], bcond['var']) + b_op_val = self._apply_dirichlet(bcond['bnd'], bcond['var']) elif bcond['type'] == 'operator': - b_op_val = self.apply_neumann(bcond['bnd'], bcond['bop']) + b_op_val = self._apply_neumann(bcond['bnd'], bcond['bop']) elif bcond['type'] == 'periodic': - b_op_val = self.apply_periodic(bcond['bnd'], bcond['bop'], + b_op_val = self._apply_periodic(bcond['bnd'], bcond['bop'], bcond['var']) elif bcond['type'] == 'data': - b_op_val = self.apply_data(bcond['bnd'], bcond['bop'], + b_op_val = self._apply_data(bcond['bnd'], bcond['bop'], bcond['var']) return b_op_val - def apply_bcs(self) -> Tuple[dict, dict]: - """ - Applies boundary conditions for each term in prepared_bconds. + def apply_bcs(self) -> Tuple[torch.Tensor, torch.Tensor, list, list]: + """ Applies boundary and data conditions for each *type* in prepared_bconds. Returns: - - - model output with boundary conditions at the input. - - true boundary values. - + bval (torch.Tensor): matrix, where each column is predicted + boundary values of one boundary type. + true_bval (torch.Tensor): matrix, where each column is true + boundary values of one boundary type. + keys (list): boundary types list corresponding to matrix_bval columns. + bval_length (list): list of length of each boundary type column. """ + bval_dict = {} true_bval_dict = {} diff --git a/epde/solver/finite_diffs.py b/epde/solver/finite_diffs.py index 8a16e58..9cd9603 100644 --- a/epde/solver/finite_diffs.py +++ b/epde/solver/finite_diffs.py @@ -1,26 +1,28 @@ -from copy import deepcopy, copy +"""Module for subgrid creation corresponding to the numerical scheme. It is used only in the *NN* method.""" + +from copy import copy import numpy as np -from typing import Tuple + flatten_list = lambda t: [item for sublist in t for item in sublist] class First_order_scheme(): - """ - Class for numerical scheme construction. Central o(h^2) difference scheme + """Class for numerical scheme construction. Central o(h^2) difference scheme is used for 'central' points, forward ('f') and backward ('b') o(h) schemes are used for boundary points. 'central', and combination 'f','b' are - corresponding to points_type. Args: + corresponding to points_type. """ def __init__(self, term: list, nvars: int, axes_scheme_type: str): """ Args: - term: differentiation direction. Example: [0,0]->d2u/dx2 if x is first - direction in the grid. - nvars: task parameters. Example: if grid(x,t) -> nvars = 2. - axes_scheme_type: scheme type: 'central' or combination of 'f' and 'b' + term (list): differentiation direction. Example: [0,0]->d2u/dx2 + if x is first direction in the grid. + nvars (int): task parameters. Example: if grid(x,t) -> nvars = 2.
+ axes_scheme_type (str): scheme type: 'central' or combination of 'f' and 'b' """ + self.term = term self.nvars = nvars if axes_scheme_type == 'central': @@ -31,17 +33,18 @@ def __init__(self, term: list, nvars: int, axes_scheme_type: str): # the idea is simple - central difference changes # [0]->([1]-[-1])/(2h) (in terms of grid nodes position) @staticmethod - def finite_diff_shift(diff: list, axis: int, mode: str) -> list: - """ - 1st order points shift for the corresponding finite difference mode. + def _finite_diff_shift(diff: list, axis: int, mode: str) -> list: + """ 1st order points shift for the corresponding finite difference mode. Args: - diff: values of finite differences. - axis: axis. - mode: the finite difference mode (i.e., forward, backward, central). + diff (list): values of finite differences. + axis (int): axis. + mode (str): the finite difference mode (i.e., forward, backward, central). + Returns: - diff_list: list with shifted points. + list: list with shifted points. """ + diff_p = copy(diff) diff_m = copy(diff) if mode == 'central': @@ -54,23 +57,22 @@ def finite_diff_shift(diff: list, axis: int, mode: str) -> list: return [diff_p, diff_m] def scheme_build(self) -> list: - """ - Building first order (in terms of accuracy) finite-difference stencil. - + """ Building first order (in terms of accuracy) finite-difference scheme. Start from list of zeros where them numbers equal nvars. After that we - move value in that axis which corresponding toterm. [0,0]->[[1,0],[-1,0]] + move value in that axis which corresponding to term. [0,0]->[[1,0],[-1,0]] it means that term was [0] (d/dx) and mode (scheme_type) is 'central'. Returns: - numerical scheme. + list: numerical scheme. """ + order = len(self.term) finite_diff = [[0 for _ in range(self.nvars)]] for i in range(order): diff_list = [] for diff in finite_diff: # we use [0,0]->[[1,0],[-1,0]] rule for the axis - f_diff = self.finite_diff_shift( + f_diff = self._finite_diff_shift( diff, self.term[i], self.direction_list[i]) if len(diff_list) == 0: @@ -85,8 +87,8 @@ def scheme_build(self) -> list: return finite_diff def sign_order(self, h: float = 1 / 2) -> list : - """ - Determines the sign of the derivative for the corresponding transformation from Finite_diffs.scheme_build(). + """ Determines the sign of the derivative for the corresponding transformation + from Finite_diffs.scheme_build(). From transformations above, we always start from +1 (1) Every +1 changes to ->[+1,-1] when order of differential rises @@ -95,13 +97,15 @@ def sign_order(self, h: float = 1 / 2) -> list : [[1,0],[-1,0]] ([+1,-1])->[[1,1],[1,-1],[-1,1],[-1,-1]] ([+1,-1,-1,+1]) Args: - h: discretizing parameter in finite difference method (i.e., grid resolution for scheme). + h (float, optional): discretizing parameter in finite- + difference method. Defaults to 1/2. Returns: - list, with signs for corresponding points. + list: list, with signs for corresponding points. """ + sign_list = [1] - for i in range(len(self.term)): + for _ in range(len(self.term)): start_list = [] for sign in sign_list: if np.unique(self.direction_list)[0] == 'central': @@ -120,11 +124,15 @@ class Second_order_scheme(): def __init__(self, term: list, nvars: int, axes_scheme_type: str): """ Args: - term: differentiation direction. Example: [0,0]->d2u/dx2 if x is first + term (list): differentiation direction. Example: [0,0]->d2u/dx2 if x is first direction in the grid. - nvars: task parameters. Example: if grid(x,t) -> nvars = 2. 
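The `[0] -> ([1] - [-1]) / (2h)` comment above is the whole central-difference rule. A minimal sketch of the shift step, assuming the 'central' branch increments and decrements the node offset along the given axis exactly as that comment describes:

```python
from copy import copy

# Central shift: one differentiation step turns a node offset like [0, 0]
# into the pair [[1, 0], [-1, 0]], i.e. (u(x + h, y) - u(x - h, y)) / (2h).
def central_shift(diff: list, axis: int) -> list:
    diff_p, diff_m = copy(diff), copy(diff)
    diff_p[axis] += 1   # forward node
    diff_m[axis] -= 1   # backward node
    return [diff_p, diff_m]

print(central_shift([0, 0], axis=0))  # [[1, 0], [-1, 0]]
```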
- axes_scheme_type: scheme type: 'central' or combination of 'f' and 'b' + nvars (int): task parameters. Example: if grid(x,t) -> nvars = 2. + axes_scheme_type (str): scheme type: 'central' or combination of 'f' and 'b' + + Raises: + ValueError: _description_ """ + self.term = term self.nvars = nvars try: @@ -135,7 +143,17 @@ def __init__(self, term: list, nvars: int, axes_scheme_type: str): self.direction_list = [axes_scheme_type[i] for i in self.term] @staticmethod - def second_order_shift(diff, axis, mode): + def _second_order_shift(diff, axis, mode) -> list: + """ 2st order points shift for the corresponding finite difference mode. + + Args: + diff (list): values of finite differences. + axis (int): axis. + mode (str): the finite difference mode (i.e., forward, backward). + + Returns: + list: list with shifted points. + """ diff_1 = copy(diff) diff_2 = copy(diff) diff_3 = copy(diff) @@ -150,13 +168,12 @@ def second_order_shift(diff, axis, mode): return [diff_3, diff_2, diff_1] def scheme_build(self) -> list: - """ - Scheme building for Crank-Nicolson variant, it's identical to + """Scheme building for Crank-Nicolson variant, it's identical to 'scheme_build' in first order method, but value is shifted by 'second_order_shift'. Returns: - numerical scheme list. + list: numerical scheme list. """ order = len(self.term) @@ -164,10 +181,9 @@ def scheme_build(self) -> list: # when we increase differential order for i in range(order): diff_list = [] - direction_list = [] for diff in finite_diff: # we use [0,0]->[[1,0],[-1,0]] rule for the axis - f_diff = self.second_order_shift( + f_diff = self._second_order_shift( diff, self.term[i], self.direction_list[i]) if len(diff_list) == 0: # and put it to the pool of differentials if it is empty @@ -181,13 +197,14 @@ def scheme_build(self) -> list: return finite_diff def sign_order(self, h: float = 1/2) -> list: - """ - Signs definition for second order schemes. + """ Signs definition for second order schemes. Args: - h: discretizing parameter in finite difference method (i.e., grid resolution for scheme). + h (float, optional): discretizing parameter in finite- + difference method (i.e., grid resolution for scheme). Defaults to 1/2. + Returns: - list, with signs for corresponding points. + list: list, with signs for corresponding points. """ sign_list = [1] @@ -214,29 +231,29 @@ class Finite_diffs(): def __init__(self, term: list, nvars: int, axes_scheme_type: str): """ Args: - term: differentiation direction. Example: [0,0]->d2u/dx2 if x is first + term (list): differentiation direction. Example: [0,0]->d2u/dx2 if x is first direction in the grid. - nvars: task parameters. Example: if grid(x,t) -> nvars = 2. - axes_scheme_type: scheme type: 'central' or combination of 'f' and 'b' + nvars (int): task parameters. Example: if grid(x,t) -> nvars = 2. + axes_scheme_type (str): scheme type: 'central' or combination of 'f' and 'b' """ + self.term = term self.nvars = nvars self.axes_scheme_type = axes_scheme_type - def scheme_choose(self, scheme_label: str, h:float = 1 / 2): - """ - Method for numerical scheme choosing via realized above. + def scheme_choose(self, scheme_label: str, h:float = 1 / 2) -> list: + """ Method for numerical scheme choosing via realized above. Args: - scheme_label: - '2'- for second order scheme (only boundaries points), + scheme_label (str): '2'- for second order scheme (only boundaries points), '1' - for first order scheme. - - h: discretizing parameter in finite difference method (i.e., grid resolution for scheme). 
+ h (float, optional): discretizing parameter in finite- + difference method (i.e., grid resolution for scheme). Defaults to 1/2. Returns: - list where list[0] is numerical scheme and list[1] is signs. + list: list where list[0] is numerical scheme and list[1] is signs. """ + if self.term == [None]: return [[None], [1]] elif scheme_label == '2': @@ -248,4 +265,4 @@ def scheme_choose(self, scheme_label: str, h:float = 1 / 2): scheme = cl_scheme.scheme_build() sign = cl_scheme.sign_order(h=h) - return [scheme, sign] \ No newline at end of file + return [scheme, sign] diff --git a/epde/solver/input_preprocessing.py b/epde/solver/input_preprocessing.py index 2052bd6..2cd9f9f 100644 --- a/epde/solver/input_preprocessing.py +++ b/epde/solver/input_preprocessing.py @@ -1,190 +1,42 @@ -import torch -import numpy as np + +"""Preprocessing module for operator (equation) and boundaries. """ + from copy import deepcopy -from typing import Union, Tuple +from typing import Union +import numpy as np +import torch from epde.solver.points_type import Points_type from epde.solver.finite_diffs import Finite_diffs from epde.solver.device import check_device -def lambda_prepare(val, lambda_: Union[int, list, torch.Tensor]) -> torch.Tensor : - """ - Prepares lambdas for corresponding equation or bcond type. +def lambda_prepare(val: torch.Tensor, + lambda_: Union[int, list, torch.Tensor]) -> torch.Tensor: + """ Prepares lambdas for corresponding equation or bcond type. Args: - val: operator tensor or bval tensor - lambda_: regularization parameters values + val (torch.Tensor): operator tensor or bval tensor + lambda_ (Union[int, list, torch.Tensor]): regularization parameters values Returns: - lambdas: torch.Tensor with lambda_ values, + torch.Tensor: torch.Tensor with lambda_ values, len(lambdas) = number of columns in val - """ + """ + + if isinstance(lambda_, torch.Tensor): return lambda_ - if type(lambda_) is int: + if isinstance(lambda_, (int, float)): try: - lambdas = torch.ones(val.shape[-1], dtype=val.dtype)*lambda_ + lambdas = torch.ones(val.shape[-1], dtype=val.dtype) * lambda_ except: lambdas = torch.tensor(lambda_, dtype=val.dtype) - elif type(lambda_) is list: + elif isinstance(lambda_, list): lambdas = torch.tensor(lambda_, dtype=val.dtype) return lambdas.reshape(1,-1) - -class Boundary(): - """ - Сlass for bringing all boundary conditions to a similar form. - """ - - def __init__(self, bconds: list): - """ - Args: - bconds: list with boundary conditions bconds = [bcond,bcond,..], where - 'bcond' is list with parameters corresponding to boundary - condition. - """ - self.bconds = bconds - - def dirichlet(self, bcond: list) -> list: - """ - Boundary conditions without derivatives (bop is None), it can be - in form: bcond = [bnd, bval, type], 'bnd' is boundary points, 'bval' is - desired function values at 'bnd' points, 'type' should be 'dirichlet'. - If task has several desired functions, bcond will be in form: - bcond = [bnd, bval, var, type] where 'var' is function number. - - Args: - bcond: list in input form: [bnd, bval, type] or [bnd, bval, var, type]. - - Returns: - boundary condition in unified form.
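A minimal usage sketch for `lambda_prepare` as defined above: a scalar is broadcast to one lambda per column of `val`, while a list is taken as per-column values.

```python
import torch
from epde.solver.input_preprocessing import lambda_prepare

val = torch.rand(100, 3)                     # e.g. residuals of a 3-equation system
print(lambda_prepare(val, 1))                # tensor([[1., 1., 1.]])
print(lambda_prepare(val, [1., 10., 100.]))  # tensor([[  1.,  10., 100.]])
```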
- """ - bcond[0] = check_device(bcond[0]) - bcond[1] = check_device(bcond[1]) - if len(bcond) == 3: - boundary = [bcond[0], None, bcond[1], 0, bcond[2]] - elif len(bcond) == 4: - boundary = [bcond[0], None, bcond[1], bcond[2], bcond[3]] - else: - raise NameError('Incorrect Dirichlet condition') - return boundary - - def neumann(self, bcond: list) -> list: - """ - Boundary conditions with derivatives (bop is not None), it can be - in form: bcond = [bnd, bop, bval, type], 'bnd' is boundary points, - 'bval' is desired function values at 'bnd' points, 'type' should be - 'dirichlet'. If task has several desired functions, bcond will be - in form: bcond = [bnd, bop, bval, var, type] where 'var' is function - number. - - Args: - bcond: list in input form: [bnd, bop, bval, type] or - [bnd, bop, bval, var, type] - Returns: - boundary condition in unified form. - """ - bcond[0] = check_device(bcond[0]) - bcond[2] = check_device(bcond[2]) - bcond[1] = EquationMixin.equation_unify(bcond[1]) - if len(bcond) == 4: - boundary = [bcond[0], bcond[1], bcond[2], None, bcond[3]] - elif len(bcond) == 5: - boundary = [bcond[0], bcond[1], bcond[2], None, bcond[4]] - else: - raise NameError('Incorrect operator condition') - return boundary - - def periodic(self, bcond: list) -> list: - """ - Periodic can be: periodic dirichlet (example u(x,t)=u(-x,t)) - in form: bcond = [bnd, type], [bnd, var, type] - or periodic operator (example du(x,t)/dx=du(-x,t)/dx) - if from: [bnd, bop, type]. - Parameter 'bnd' is list: [b_coord1, b_coord2,..] - - Args: - bcond: list in input form: [bnd, type] or [bnd, var, type] or - [bnd, bop, type] - - Returns: - boundary condition in unfied form. - """ - for i in range(len(bcond[0])): - bcond[0][i] = check_device(bcond[0][i]) - if len(bcond) == 2: - b_val = torch.zeros(bcond[0][0].shape[0]) - boundary = [bcond[0], None, b_val, 0, bcond[1]] - elif len(bcond) == 3 and type(bcond[1]) is int: - b_val = torch.zeros(bcond[0][0].shape[0]) - boundary = [bcond[0], None, b_val, bcond[1], bcond[2]] - elif type(bcond[1]) is dict: - b_val = torch.zeros(bcond[0][0].shape[0]) - bcond[1] = EquationMixin.equation_unify(bcond[1]) - boundary = [bcond[0], bcond[1], b_val, None, bcond[2]] - else: - raise NameError('Incorrect periodic condition') - return boundary - - def data(self, bcond:list) -> list: - '''determine type of data condition and call neumann or dirichlet methods''' - - bop_exist = False - for bcond_part in bcond: - if type(bcond_part) == dict: - bop_exist = True - break - - if bop_exist: - boundary = self.neumann(bcond) - else: - boundary = self.dirichlet(bcond) - - return boundary - - def bnd_choose(self, bcond: list) -> list: - """ - Method that choose type of boundary condition. - - Args: - bcond: list with boundary condition parameters. - - Returns: - return unified condition. - - """ - if bcond[-1] == 'periodic': - bnd = self.periodic(bcond) - elif bcond[-1] == 'dirichlet': - bnd = self.dirichlet(bcond) - elif bcond[-1] == 'operator': - bnd = self.neumann(bcond) - elif bcond[-1] == 'data': - bnd = self.data(bcond) - else: - raise NameError('TEDEouS can not use ' + bcond[-1] + ' condition type') - return bnd - - def bnd_unify(self) -> list: - """ - Method that convert result of 'bnd_choose' to dict with correspondung - keys = ('bnd', 'bop', 'bval', 'var', 'type'). - - Returns: - unified boundary conditions in dict form. 
- """ - unified_bnd = [] - for bcond in self.bconds: - bnd = {} - bnd['bnd'], bnd['bop'], bnd['bval'], bnd['var'], \ - bnd['type'] = self.bnd_choose(bcond) - unified_bnd.append(bnd) - return unified_bnd - - class EquationMixin: """ Auxiliary class. This one contains some methods that uses in other classes. @@ -192,15 +44,14 @@ class EquationMixin: @staticmethod def equation_unify(equation: dict) -> dict: - """ - Adding 'var' to the 'operator' if it's absent or convert to + """ Adding 'var' to the 'operator' if it's absent or convert to list 'pow' and 'var' if it's int or float. Args: - operator: operator in input form. + equation (dict): operator in input form. Returns: - equation: equation with unified for solver parameters. + dict: equation with unified for solver parameters. """ for operator_label in equation.keys(): @@ -213,7 +64,7 @@ def equation_unify(equation: dict) -> dict: operator[dif_dir] = [operator[dif_dir]] operator['pow'] = [operator['pow']] operator['var'] = [0] - elif type(operator['pow']) is list: + elif isinstance(operator['pow'], list): operator['var'] = [0 for _ in operator['pow']] continue if isinstance(operator['pow'], (int, float)): @@ -225,14 +76,16 @@ def equation_unify(equation: dict) -> dict: @staticmethod def closest_point(grid: torch.Tensor, target_point: float) -> int: - """ - Defines the closest boundary point to the grid. + """ Defines the closest boundary point to the grid. + Args: + grid (torch.Tensor): grid (domain discretization). + target_point (float): boundary point. - target_point: boundary point. Returns: - position of the boundary point on the grid. + int: position of the boundary point on the grid. """ + min_dist = np.inf pos = 0 min_pos = 0 @@ -246,37 +99,37 @@ def closest_point(grid: torch.Tensor, target_point: float) -> int: @staticmethod def convert_to_double(bnd: Union[list, np.array]) -> float: - """ - Converts points to double type. + """ Converts points to double type. Args: - bnd: array or list of arrays + bnd (Union[list, np.array]): array or list of arrays points that should be converted + Returns: - bnd with double type. + float: bnd with double type. """ - if type(bnd) == list: + if isinstance(bnd, list): for i, cur_bnd in enumerate(bnd): bnd[i] = EquationMixin.convert_to_double(cur_bnd) return bnd - elif type(bnd) == np.array: + elif isinstance(bnd, np.ndarray): return torch.from_numpy(bnd).double() return bnd.double() @staticmethod def search_pos(grid: torch.Tensor, bnd) -> list: - """ - Method for searching position bnd in grid. + """ Method for searching position bnd in grid. Args: - grid: array of a n-D points. - bnd: points that should be converted. + grid (torch.Tensor): array of a n-D points. + bnd (_type_): points that should be converted. + Returns: - list of positions bnd on grid. + list: list of positions bnd on grid. """ - if type(bnd) == list: + if isinstance(bnd, list): for i, cur_bnd in enumerate(bnd): bnd[i] = EquationMixin.search_pos(grid, cur_bnd) return bnd @@ -292,14 +145,15 @@ def search_pos(grid: torch.Tensor, bnd) -> list: @staticmethod def bndpos(grid: torch.Tensor, bnd: torch.Tensor) -> Union[list, int]: - """ - Returns the position of the boundary points on the grid. + """ Returns the position of the boundary points on the grid. Args: - grid: grid for coefficient in form of torch.Tensor mapping. - bnd: boundary conditions. + grid (torch.Tensor): grid for coefficient in form of + torch.Tensor mapping. + bnd (torch.Tensor):boundary conditions. + Returns: - list of positions of the boundary points on the grid. 
+ Union[list, int]: list of positions of the boundary points on the grid. """ if grid.shape[0] == 1: @@ -316,19 +170,27 @@ class Equation_NN(EquationMixin, Points_type): form. Then it will be used for determine solution by 'NN' method. """ - def __init__(self, grid: torch.Tensor, operator: Union[dict, list], bconds, h: float = 0.001, - inner_order: str = '1', boundary_order: str = '2'): - """ - Prepares equation, boundary conditions for NN method. + def __init__(self, + grid: torch.Tensor, + operator: Union[dict, list], + bconds: list, + h: float = 0.001, + inner_order: str = '1', + boundary_order: str = '2'): + """ Prepares equation, boundary conditions for *NN* mode. Args: - grid: array of a n-D points. - operator: equation. - bconds: boundary conditions. - h: discretizing parameter in finite difference method (i.e., grid resolution for scheme). - inner_order: accuracy inner order for finite difference. Default = 1 - boundary_order: accuracy boundary order for finite difference. Default = 2 + grid (torch.Tensor): tensor of a n-D points. + operator (Union[dict, list]): equation. + bconds (list): boundary conditions. + h (float, optional): discretizing parameter in finite difference + method(i.e., grid resolution for scheme). Defaults to 0.001. + inner_order (str, optional): accuracy inner order for finite difference. + Defaults to '1'. + boundary_order (str, optional):accuracy boundary order for finite difference. + Defaults to '2'. """ + super().__init__(grid) self.grid = grid self.operator = operator @@ -337,18 +199,21 @@ def __init__(self, grid: torch.Tensor, operator: Union[dict, list], bconds, h: self.inner_order = inner_order self.boundary_order = boundary_order - def operator_to_type_op(self, dif_direction: list, nvars: int, axes_scheme_type: str) -> list: - """ - Function serves applying different schemes to a different point types + def _operator_to_type_op(self, + dif_direction: list, + nvars: int, + axes_scheme_type: str) -> list: + """ Function serves applying different schemes to a different point types for entire differentiation direction. Args: - dif_direction: differentiation direction, (example:d2/dx2->[[0,0]]) - nvars: dimensionality of the problem. - axes_scheme_type: 'central' or combination of 'f' and 'b'. + dif_direction (list): differentiation direction, (example:d2/dx2->[[0,0]]) + nvars (int): dimensionality of the problem. + axes_scheme_type (str): 'central' or combination of 'f' and 'b'. Returns: - list, where the conventional operator changed to steps and signs (see scheme_build function description). + list: list, where the conventional operator changed to + steps and signs (see scheme_build function description). """ if axes_scheme_type == 'central': @@ -366,18 +231,20 @@ def operator_to_type_op(self, dif_direction: list, nvars: int, axes_scheme_type: s_order_list.append(s_order) return [fin_diff_list, s_order_list] - def finite_diff_scheme_to_grid_list(self, finite_diff_scheme: list, grid_points: torch.Tensor) -> list: - """ - Method that converts integer finite difference steps in term described + def _finite_diff_scheme_to_grid_list(self, + finite_diff_scheme: list, + grid_points: torch.Tensor) -> list: + """ Method that converts integer finite difference steps in term described in Finite_diffs class to a grids with shifted points, i.e. from field (x,y) -> (x,y+h). 
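A rough sketch of the shift this method performs, assuming each scheme entry is a list of integer node offsets and `h` is the scheme resolution (illustrative only, not the actual implementation):

```python
import torch

# Offsets [0, 1] map every point (x, y) to (x, y + h), matching the
# "(x, y) -> (x, y + h)" description above.
def shift_points(grid_points: torch.Tensor, offsets: list, h: float) -> torch.Tensor:
    return grid_points + h * torch.tensor(offsets, dtype=grid_points.dtype)

pts = torch.tensor([[0.0, 0.0], [0.5, 0.5]])
print(shift_points(pts, [0, 1], h=0.001))  # y-coordinates shifted by h
```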
Args: - finite_diff_scheme: operator_to_type_op one term - grid_points: grid points that will be shifted corresponding to finite diff - scheme + finite_diff_scheme (list): operator_to_type_op one term. + grid_points (torch.Tensor): grid points that will be shifted + corresponding to finite diff scheme. + Returns: - list, where the steps and signs changed to grid and signs. + list: list, where the steps and signs changed to grid and signs. """ s_grid_list = [] @@ -391,65 +258,73 @@ def finite_diff_scheme_to_grid_list(self, finite_diff_scheme: list, grid_points: s_grid_list.append(s_grid) return s_grid_list - def checking_coeff(self, coeff: Union[int, float, torch.Tensor], grid_points: torch.Tensor): - """ - Checks the coefficient type + def _checking_coeff(self, + coeff: Union[int, float, torch.Tensor, callable], + grid_points: torch.Tensor) -> torch.Tensor: + """ Checks the coefficient type Args: - coeff: coefficient in equation operator. - grid_points: if coeff is callable or torch.Tensor + coeff (Union[int, float, torch.Tensor, callable]): coefficient + in equation operator. + grid_points (torch.Tensor): if coeff is callable or torch.Tensor + + Raises: + NameError: coeff" should be: torch.Tensor or callable or int or float! Returns: - coefficient + torch.Tensor: coefficient """ - if type(coeff) == int or type(coeff) == float: + if isinstance(coeff, (int, float)): coeff1 = coeff elif callable(coeff): coeff1 = (coeff, grid_points) - elif type(coeff) == torch.Tensor: + elif isinstance(coeff, torch.Tensor): coeff = check_device(coeff) pos = self.bndpos(self.grid, grid_points) coeff1 = coeff[pos].reshape(-1, 1) - elif type(coeff) is torch.nn.parameter.Parameter: + elif isinstance(coeff, torch.nn.parameter.Parameter): coeff1 = coeff else: raise NameError('"coeff" should be: torch.Tensor or callable or int or float!') return coeff1 - def type_op_to_grid_shift_op(self, fin_diff_op: list, grid_points) -> list: - """ - Converts operator to a grid_shift form. Includes term coefficient + def _type_op_to_grid_shift_op(self, fin_diff_op: list, grid_points) -> list: + """ Converts operator to a grid_shift form. Includes term coefficient conversion. Coeff may be integer, function or array, last two are mapped to a subgrid that corresponds point type. Args: - fin_diff_op: operator_to_type_op result. - grid_points: grid points that will be shifted corresponding to finite diff scheme. + fin_diff_op (list): operator_to_type_op result. + grid_points (_type_): grid points that will be shifted + corresponding to finite diff scheme. Returns: - shift_grid_op: final form of differential operator used in the algorithm for - single grid type. + list: final form of differential operator used in the algorithm for + single grid type. """ + shift_grid_op = [] for term1 in fin_diff_op: - grid_op = self.finite_diff_scheme_to_grid_list(term1, grid_points) + grid_op = self._finite_diff_scheme_to_grid_list(term1, grid_points) shift_grid_op.append(grid_op) return shift_grid_op - def one_operator_prepare(self, operator: dict, grid_points: torch.Tensor, points_type: str) -> dict: - """ - Method for operator preparing, there is construct all predefined + def _one_operator_prepare(self, + operator: dict, + grid_points: torch.Tensor, + points_type: str) -> dict: + """ Method for operator preparing, there is construct all predefined methods. Args: - operator: operator in input form - grid_points: see type_op_to_grid_shift_op method - points_type: points type of grid_points + operator (dict): operator in input form. 
+ grid_points (torch.Tensor): see type_op_to_grid_shift_op method. + points_type (str): points type of grid_points. Returns: - prepared operator + dict: prepared operator """ nvars = self.grid.shape[-1] @@ -457,81 +332,75 @@ def one_operator_prepare(self, operator: dict, grid_points: torch.Tensor, points for operator_label in operator: term = operator[operator_label] dif_term = list(term.keys())[1] - term['coeff'] = self.checking_coeff(term['coeff'], grid_points) - term[dif_term] = self.operator_to_type_op(term[dif_term], + term['coeff'] = self._checking_coeff(term['coeff'], grid_points) + term[dif_term] = self._operator_to_type_op(term[dif_term], nvars, points_type) - term[dif_term][0] = self.type_op_to_grid_shift_op( + term[dif_term][0] = self._type_op_to_grid_shift_op( term[dif_term][0], grid_points) return operator def operator_prepare(self) -> list: - """ - Method for all operators preparing. If system case is, it will call + """ Method for all operators preparing. If system case is, it will call 'one_operator_prepare' method for number of equations times. Returns: - list of dictionaries, where every dictionary is the result of - 'one_operator_prepare' + list: list of dictionaries, where every dictionary is the result of + 'one_operator_prepare' """ grid_points = self.grid_sort()['central'] - if type(self.operator) is list and type(self.operator[0]) is dict: + if isinstance(self.operator, list) and isinstance(self.operator[0], dict): num_of_eq = len(self.operator) prepared_operator = [] for i in range(num_of_eq): - equation = self.one_operator_prepare( + equation = self._one_operator_prepare( self.operator[i], grid_points, 'central') prepared_operator.append(equation) else: - equation = self.one_operator_prepare( + equation = self._one_operator_prepare( self.operator, grid_points, 'central') prepared_operator = [equation] return prepared_operator - def apply_bnd_operators(self, bnd_operator: dict, bnd_dict: dict) -> list: - """ - Method for applying boundary operator for all points type in bnd_dict. + def _apply_bnd_operators(self, bnd_operator: dict, bnd_dict: dict) -> list: + """ Method for applying boundary operator for all points type in bnd_dict. Args: - bnd_operator: boundary operator in input form. - bnd_dict: dictionary (keys is points type, values is boundary points). + bnd_operator (dict): boundary operator in input form. + bnd_dict (dict): dictionary (keys is points type, values is boundary points). Returns: - final form of differential operator used in the algorithm for - subset grid types. - + list: final form of differential operator used in the algorithm for + subset grid types. """ operator_list = [] for points_type in list(bnd_dict.keys()): - equation = self.one_operator_prepare( + equation = self._one_operator_prepare( deepcopy(bnd_operator), bnd_dict[points_type], points_type) operator_list.append(equation) return operator_list def bnd_prepare(self) -> list: - """ - Method for boundary conditions preparing to final form. + """ Method for boundary conditions preparing to final form. 
Returns: - list of dictionaries where every dict is one boundary condition + list: list of dictionaries where every dict is one boundary condition """ grid_dict = self.grid_sort() - bconds1 = Boundary(self.bconds).bnd_unify() - if bconds1 == None: - return None - for bcond in bconds1: + + for bcond in self.bconds: bnd_dict = self.bnd_sort(grid_dict, bcond['bnd']) - if bcond['bop'] != None: + if bcond['bop'] is not None: if bcond['type'] == 'periodic': - bcond['bop'] = [self.apply_bnd_operators( + bcond['bop'] = [self._apply_bnd_operators( bcond['bop'], i) for i in bnd_dict] else: - bcond['bop'] = self.apply_bnd_operators( + bcond['bop'] = self._apply_bnd_operators( bcond['bop'], bnd_dict) - return bconds1 + return self.bconds class Equation_autograd(EquationMixin): @@ -539,98 +408,99 @@ class Equation_autograd(EquationMixin): Prepares equation for autograd method (i.e., from conventional form to input form). """ - def __init__(self, grid: torch.Tensor, operator, bconds): - """ - Prepares equation for autograd method (i.e., from conventional form to input form). + def __init__(self, + grid: torch.Tensor, + operator: Union[dict, list], + bconds: list): + """ Prepares equation for autograd method + (i.e., from conventional form to input form). Args: - grid: array of a n-D points. - operator: equation. - bconds: boundary conditions. + grid (torch.Tensor): tensor of a n-D points. + operator (Union[dict, list]): equation. + bconds (list): boundary conditions in input form. """ + self.grid = grid self.operator = operator self.bconds = bconds - def checking_coeff(self, coeff: Union[int, float, torch.Tensor]) -> Union[int, float, torch.Tensor]: - """ - Checks the coefficient type + def _checking_coeff(self, + coeff: Union[int, float, torch.Tensor]) -> Union[int, float, torch.Tensor]: + """ Checks the coefficient type Args: - coeff: coefficient in equation operator. + coeff (Union[int, float, torch.Tensor]): coefficient in equation operator. + + Raises: + NameError: "coeff" should be: torch.Tensor or callable or int or float! + Returns: - coefficient + Union[int, float, torch.Tensor]: coefficient """ - if type(coeff) == int or type(coeff) == float: + if isinstance(coeff, (int, float)): coeff1 = coeff elif callable(coeff): coeff1 = coeff - elif type(coeff) == torch.Tensor: + elif isinstance(coeff, torch.Tensor): coeff = check_device(coeff) coeff1 = coeff.reshape(-1, 1) - elif type(coeff) is torch.nn.parameter.Parameter: + elif isinstance(coeff, torch.nn.parameter.Parameter): coeff1 = coeff else: raise NameError('"coeff" should be: torch.Tensor or callable or int or float!') return coeff1 - def one_operator_prepare(self, operator: dict) -> dict: - """ - Method for all operators preparing. If system case is, it will call + def _one_operator_prepare(self, operator: dict) -> dict: + """ Method for all operators preparing. If system case is, it will call 'one_operator_prepare' method for number of equations times. + Args: + operator (dict): operator in input form. + Returns: - list of dictionaries, where every dictionary is the result of - 'one_operator_prepare' + dict: dict, where coeff is checked. """ operator = self.equation_unify(operator) for operator_label in operator: term = operator[operator_label] - term['coeff'] = self.checking_coeff(term['coeff']) + term['coeff'] = self._checking_coeff(term['coeff']) return operator def operator_prepare(self) -> list: - """ - Method for all operators preparing. If system case is, it will call + """ Method for all operators preparing. 
If system case is, it will call 'one_operator_prepare' method for number of equations times. Returns: - list of dictionaries, where every dictionary is the result of - 'one_operator_prepare' - + list: list of dictionaries, where every dictionary is the result of + 'one_operator_prepare' """ - if type(self.operator) is list and type(self.operator[0]) is dict: + if isinstance(self.operator, list) and isinstance(self.operator[0], dict): num_of_eq = len(self.operator) prepared_operator = [] for i in range(num_of_eq): equation = self.equation_unify(self.operator[i]) - prepared_operator.append(self.one_operator_prepare(equation)) + prepared_operator.append(self._one_operator_prepare(equation)) else: equation = self.equation_unify(self.operator) - prepared_operator = [self.one_operator_prepare(equation)] + prepared_operator = [self._one_operator_prepare(equation)] return prepared_operator - def bnd_prepare(self): - """ - Method for boundary conditions preparing to final form - Returns - ------- - prepared_bnd : list - list of dictionaries where every dict is one boundary condition - """ - bconds = Boundary(self.bconds).bnd_unify() - if bconds == None: - return None + def bnd_prepare(self) -> list: + """ Method for boundary conditions preparing to final form - for bcond in bconds: - if bcond['bop'] != None: - bcond['bop'] = self.equation_unify(bcond['bop']) + Returns: + list: list of dictionaries where every dict is one boundary condition + """ - return bconds + if self.bconds is None: + return None + else: + return self.bconds class Equation_mat(EquationMixin): @@ -639,29 +509,31 @@ class Equation_mat(EquationMixin): preparing) for 'mat' method. """ - def __init__(self, grid, operator, bconds): - """ - Prepares equation for autograd method (i.e., from conventional form to input form). + def __init__(self, + grid: torch.Tensor, + operator: Union[list, dict], + bconds: list): + """ Prepares equation for autograd method + (i.e., from conventional form to input form). Args: - grid: array of a n-D points. - operator: equation. - bconds: boundary conditions. + grid (torch.Tensor): grid, result of meshgrid. + operator (Union[list, dict]): operator in input form. + bconds (list): boundary conditions in input form. """ + self.grid = grid self.operator = operator self.bconds = bconds def operator_prepare(self) -> list: - """ - Method realizes operator preparing for 'mat' method + """ Method realizes operator preparing for 'mat' method using only 'equation_unify' method. - Returns: - final form of differential operator used in the algorithm. + list: final form of differential operator used in the algorithm. """ - if type(self.operator) is list and type(self.operator[0]) is dict: + if isinstance(self.operator, list) and isinstance(self.operator[0], dict): num_of_eq = len(self.operator) prepared_operator = [] for i in range(num_of_eq): @@ -673,16 +545,16 @@ def operator_prepare(self) -> list: return prepared_operator - def point_position(self, bnd) -> list: - """ - Define position of boundary points on the grid. + def _point_position(self, bnd: torch.Tensor) -> list: + """ Define position of boundary points on the grid. Args: - bnd: + bnd (torch.Tensor): boundary subgrid. Returns: - list of positions, where boundary points intersects on the grid. + list: list of positions, where boundary points intersects on the grid. 
""" + bpos = [] for pt in bnd: if self.grid.shape[0] == 1: @@ -698,41 +570,48 @@ def point_position(self, bnd) -> list: return bpos def bnd_prepare(self) -> list: - """ - Method for boundary conditions preparing to final form. + """ Method for boundary conditions preparing to final form. Returns: - list of dictionaries where every dict is one boundary condition + list: list of dictionaries where every dict is one boundary condition. """ - bconds = Boundary(self.bconds).bnd_unify() - for bcond in bconds: + + for bcond in self.bconds: if bcond['type'] == 'periodic': bpos = [] for bnd in bcond['bnd']: - bpos.append(self.point_position(bnd)) + bpos.append(self._point_position(bnd)) else: - bpos = self.point_position(bcond['bnd']) - if bcond['bop'] != None: + bpos = self._point_position(bcond['bnd']) + if bcond['bop'] is not None: bcond['bop'] = self.equation_unify(bcond['bop']) bcond['bnd'] = bpos - return bconds + return self.bconds -class Equation(): +class Operator_bcond_preproc(): """ Interface for preparing equations due to chosen calculation method. """ - def __init__(self, grid: torch.Tensor, operator: Union[dict, list], bconds: list, h: float = 0.001, - inner_order: str ='1', boundary_order: str ='2'): - """ + def __init__(self, + grid: torch.Tensor, + operator: Union[dict, list], + bconds: list, + h: float = 0.001, + inner_order: str ='1', + boundary_order: str ='2'): + """_summary_ + Args: - grid: array of a n-D points. - operator: equation. - bconds: boundary conditions. - h: discretizing parameter in finite difference method (i.e., grid resolution for scheme). - inner_order: accuracy inner order for finite difference. Default = 1 - boundary_order: accuracy boundary order for finite difference. Default = 2 + grid (torch.Tensor): grid from cartesian_prod or meshgrid result. + operator (Union[dict, list]): equation. + bconds (list): boundary conditions. + h (float, optional): discretizing parameter in finite- + difference method (i.e., grid resolution for scheme). Defaults to 0.001. + inner_order (str, optional): accuracy inner order for finite difference. Defaults to '1'. + boundary_order (str, optional): accuracy boundary order for finite difference. Defaults to '2'. """ + self.grid = check_device(grid) self.operator = operator self.bconds = bconds @@ -740,20 +619,21 @@ def __init__(self, grid: torch.Tensor, operator: Union[dict, list], bconds: list self.inner_order = inner_order self.boundary_order = boundary_order - def set_mode(self, mode: str) -> Union[Equation_NN, Equation_mat, Equation_autograd]: - """ - Setting the calculation method. + def set_strategy(self, strategy: str) -> Union[Equation_NN, Equation_mat, Equation_autograd]: + """ Setting the calculation method. + Args: - strategy: Calculation method. (i.e., "NN", "autograd", "mat"). + strategy (str): Calculation method. (i.e., "NN", "autograd", "mat"). + Returns: - A given calculation method. + Union[Equation_NN, Equation_mat, Equation_autograd]: A given calculation method. 
""" - if mode == 'NN': + if strategy == 'NN': return Equation_NN(self.grid, self.operator, self.bconds, h=self.h, inner_order=self.inner_order, boundary_order=self.boundary_order) - if mode == 'mat': + if strategy == 'mat': return Equation_mat(self.grid, self.operator, self.bconds) - if mode == 'autograd': - return Equation_autograd(self.grid, self.operator, self.bconds) \ No newline at end of file + if strategy == 'autograd': + return Equation_autograd(self.grid, self.operator, self.bconds) diff --git a/epde/solver/losses.py b/epde/solver/losses.py index 35eae5b..4c61748 100644 --- a/epde/solver/losses.py +++ b/epde/solver/losses.py @@ -1,7 +1,11 @@ +"""Module for losses calculation""" + from typing import Tuple, Union +import numpy as np +import torch from epde.solver.input_preprocessing import lambda_prepare -from epde.solver.utils import * + class Losses(): """ @@ -9,59 +13,106 @@ class Losses(): """ def __init__(self, mode: str, - weak_form: Union[None, torch.Tensor], + weak_form: Union[None, list], n_t: int, - tol): + tol: Union[int, float], + n_t_operation: callable = None): + """ + Args: + mode (str): calculation mode, *NN, autograd, mat*. + weak_form (Union[None, list]): list of basis functions if form is weak. + n_t (int): number of unique points in time dimension. + tol (Union[int, float])): penalty in *casual loss*. + n_t_operation (callable): function to calculate n_t for each batch + """ + self.mode = mode self.weak_form = weak_form self.n_t = n_t + self.n_t_operation = n_t_operation self.tol = tol - # TODO: refactor loss_op, loss_bcs into one function, carefully figure out when bval is None + fix causal_loss operator crutch (line 76). + # TODO: refactor loss_op, loss_bcs into one function, carefully figure out when bval + # is None + fix causal_loss operator crutch (line 76). + + def _loss_op(self, + operator: torch.Tensor, + lambda_op: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: + """ Operator term in loss calc-n. + + Args: + operator (torch.Tensor): operator calc-n result. + For more details to eval module -> operator_compute(). - def loss_op(self, operator, lambda_op) -> torch.Tensor: - if self.weak_form != None and self.weak_form != []: + lambda_op (torch.Tensor): regularization parameter for operator term in loss. + + Returns: + loss_operator (torch.Tensor): operator term in loss. + op (torch.Tensor): MSE of operator on the whole grid. + """ + if self.weak_form is not None and self.weak_form != []: op = operator else: op = torch.mean(operator**2, 0) - - loss_operator = op @ lambda_op.T + + loss_operator = op @ lambda_op.T return loss_operator, op - def loss_bcs(self, bval, true_bval, lambda_bound) -> torch.Tensor: - """ - Computes boundary loss for corresponding type. + def _loss_bcs(self, + bval: torch.Tensor, + true_bval: torch.Tensor, + lambda_bound: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: + """ Computes boundary loss for corresponding type. + + Args: + bval (torch.Tensor): calculated values of boundary conditions. + true_bval (torch.Tensor): true values of boundary conditions. + lambda_bound (torch.Tensor): regularization parameter for boundary term in loss. Returns: - boundary loss + loss_bnd (torch.Tensor): boundary term in loss. + bval_diff (torch.Tensor): MSE of all boundary con-s. 
""" + bval_diff = torch.mean((bval - true_bval)**2, 0) loss_bnd = bval_diff @ lambda_bound.T return loss_bnd, bval_diff - def default_loss(self, operator, bval, true_bval, lambda_op, lambda_bound, save_graph=True) \ - -> Tuple[torch.Tensor, torch.Tensor]: - """ - Computes l2 loss. + def _default_loss(self, + operator: torch.Tensor, + bval: torch.Tensor, + true_bval: torch.Tensor, + lambda_op: torch.Tensor, + lambda_bound: torch.Tensor, + save_graph: bool = True) -> Tuple[torch.Tensor, torch.Tensor]: + """ Compute l2 loss. Args: - lambda_bound: an arbitrary chosen constant, influence only convergence speed. + operator (torch.Tensor): operator calc-n result. + For more details to eval module -> operator_compute(). + bval (torch.Tensor): calculated values of boundary conditions. + true_bval (torch.Tensor): true values of boundary conditions. + lambda_op (torch.Tensor): regularization parameter for operator term in loss. + lambda_bound (torch.Tensor): regularization parameter for boundary term in loss. + save_graph (bool, optional): saving computational graph. Defaults to True. + Returns: - model loss. + loss (torch.Tensor): loss. + loss_normalized (torch.Tensor): loss, where regularization parameters are 1. """ - if bval == None: + if bval is None: return torch.sum(torch.mean((operator) ** 2, 0)) - loss_oper, op = self.loss_op(operator, lambda_op) - - loss_bnd, bval_diff = self.loss_bcs(bval, true_bval, lambda_bound) + loss_oper, op = self._loss_op(operator, lambda_op) + dtype = op.dtype + loss_bnd, bval_diff = self._loss_bcs(bval, true_bval, lambda_bound) loss = loss_oper + loss_bnd - lambda_op_normalized = lambda_prepare(operator, 1) - lambda_bound_normalized = lambda_prepare(bval, 1) + lambda_op_normalized = lambda_prepare(operator, 1).to(dtype) + lambda_bound_normalized = lambda_prepare(bval, 1).to(dtype) with torch.no_grad(): loss_normalized = op @ lambda_op_normalized.T +\ @@ -76,30 +127,43 @@ def default_loss(self, operator, bval, true_bval, lambda_op, lambda_bound, save_ return loss, loss_normalized - def causal_loss(self, operator, bval, true_bval, lambda_op, lambda_bound) \ - -> torch.Tensor: - """ - Computes causal loss, which is calculated with weights matrix: + def _causal_loss(self, + operator: torch.Tensor, + bval: torch.Tensor, + true_bval: torch.Tensor, + lambda_op: torch.Tensor, + lambda_bound: torch.Tensor)-> Tuple[torch.Tensor, torch.Tensor]: + """ Computes causal loss, which is calculated with weights matrix: W = exp(-tol*(Loss_i)) where Loss_i is sum of the L2 loss from 0 to t_i moment of time. This loss function should be used when one of the DE independent parameter is time. Args: - lambda_bound: an arbitrary chosen constant, influence only convergence speed. - tol: float constant, influences on error penalty. + operator (torch.Tensor): operator calc-n result. + For more details to eval module -> operator_compute(). + bval (torch.Tensor): calculated values of boundary conditions. + true_bval (torch.Tensor): true values of boundary conditions. + lambda_op (torch.Tensor): regularization parameter for operator term in loss. + lambda_bound (torch.Tensor): regularization parameter for boundary term in loss. + Returns: - model loss. + loss (torch.Tensor): loss. + loss_normalized (torch.Tensor): loss, where regularization parameters are 1. 
""" - - res = torch.sum(operator**2, dim=1).reshape(self.n_t, -1) - res = torch.mean(res, axis=1).reshape(self.n_t, 1) - M = torch.triu(torch.ones((self.n_t, self.n_t), dtype=res.dtype), diagonal=1).T + if self.n_t_operation is not None: # calculate if batch mod + self.n_t = self.n_t_operation(operator) + try: + res = torch.sum(operator**2, dim=1).reshape(self.n_t, -1) + except: # if n_t_operation calculate bad n_t then change n_t to batch size + self.n_t = operator.size()[0] + res = torch.sum(operator**2, dim=1).reshape(self.n_t, -1) + m = torch.triu(torch.ones((self.n_t, self.n_t), dtype=res.dtype), diagonal=1).T with torch.no_grad(): - W = torch.exp(- self.tol * (M @ res)) + w = torch.exp(- self.tol * (m @ res)) - loss_oper = torch.mean(W * res) + loss_oper = torch.mean(w * res) - loss_bnd, bval_diff = self.loss_bcs(bval, true_bval, lambda_bound) + loss_bnd, bval_diff = self._loss_bcs(bval, true_bval, lambda_bound) loss = loss_oper + loss_bnd @@ -110,23 +174,33 @@ def causal_loss(self, operator, bval, true_bval, lambda_op, lambda_bound) \ return loss, loss_normalized - def weak_loss(self, operator, bval, true_bval, lambda_op, lambda_bound) \ - -> torch.Tensor: - """ - Weak solution of O/PDE problem. + def _weak_loss(self, + operator: torch.Tensor, + bval: torch.Tensor, + true_bval: torch.Tensor, + lambda_op: torch.Tensor, + lambda_bound: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: + """ Weak solution of O/PDE problem. Args: - weak_form: list of basis functions. - lambda_bound: const regularization parameter. + operator (torch.Tensor): operator calc-n result. + For more details to eval module -> operator_compute(). + bval (torch.Tensor): calculated values of boundary conditions. + true_bval (torch.Tensor): true values of boundary conditions. + lambda_op (torch.Tensor): regularization parameter for operator term in loss. + lambda_bound (torch.Tensor): regularization parameter for boundary term in loss. + Returns: - model loss. + loss (torch.Tensor): loss. + loss_normalized (torch.Tensor): loss, where regularization parameters are 1. """ - if bval == None: + + if bval is None: return sum(operator) - loss_oper, op = self.loss_op(operator, lambda_op) + loss_oper, op = self._loss_op(operator, lambda_op) - loss_bnd, bval_diff = self.loss_bcs(bval, true_bval, lambda_bound) + loss_bnd, bval_diff = self._loss_bcs(bval, true_bval, lambda_bound) loss = loss_oper + loss_bnd lambda_op_normalized = lambda_prepare(operator, 1) @@ -138,25 +212,37 @@ def weak_loss(self, operator, bval, true_bval, lambda_op, lambda_bound) \ return loss, loss_normalized - def compute(self, operator, bval, true_bval, lambda_op, lambda_bound, save_graph=True) -> \ - Union[default_loss, weak_loss, causal_loss]: - """ - Setting the required loss calculation method. - - Args: - tol: float constant, influences on error penalty. - Returns: - A given calculation method. 
- """ - if self.mode == 'mat' or self.mode == 'autograd': - if bval == None: - print('No bconds is not possible, returning infinite loss') - return np.inf - inputs = [operator, bval, true_bval, lambda_op, lambda_bound] - - if self.weak_form != None and self.weak_form != []: - return self.weak_loss(*inputs) - elif self.tol != 0: - return self.causal_loss(*inputs) - else: - return self.default_loss(*inputs, save_graph) \ No newline at end of file + def compute(self, + operator: torch.Tensor, + bval: torch.Tensor, + true_bval: torch.Tensor, + lambda_op: torch.Tensor, + lambda_bound: torch.Tensor, + save_graph: bool = True) -> Union[_default_loss, _weak_loss, _causal_loss]: + """ Setting the required loss calculation method. + + Args: + operator (torch.Tensor): operator calc-n result. + For more details to eval module -> operator_compute(). + bval (torch.Tensor): calculated values of boundary conditions. + true_bval (torch.Tensor): true values of boundary conditions. + lambda_op (torch.Tensor): regularization parameter for operator term in loss. + lambda_bound (torch.Tensor): regularization parameter for boundary term in loss. + save_graph (bool, optional): saving computational graph. Defaults to True. + + Returns: + Union[default_loss, weak_loss, causal_loss]: A given calculation method. + """ + + if self.mode in ('mat', 'autograd'): + if bval is None: + print('No bconds is not possible, returning infinite loss') + return np.inf + inputs = [operator, bval, true_bval, lambda_op, lambda_bound] + + if self.weak_form is not None and self.weak_form != []: + return self._weak_loss(*inputs) + elif self.tol != 0: + return self._causal_loss(*inputs) + else: + return self._default_loss(*inputs, save_graph) diff --git a/epde/solver/model.py b/epde/solver/model.py new file mode 100644 index 0000000..18deff9 --- /dev/null +++ b/epde/solver/model.py @@ -0,0 +1,195 @@ +import torch +from typing import Union, List, Any +import tempfile +import os + +from epde.solver.data import Domain, Conditions, Equation +from epde.solver.input_preprocessing import Operator_bcond_preproc +from epde.solver.callbacks.callback_list import CallbackList +from epde.solver.solution import Solution +from epde.solver.optimizers.optimizer import Optimizer +from epde.solver.utils import save_model_nn, save_model_mat +from epde.solver.optimizers.closure import Closure +from epde.solver.device import device_type +import datetime + + +class Model(): + """class for preprocessing""" + def __init__( + self, + net: Union[torch.nn.Module, torch.Tensor], + domain: Domain, + equation: Equation, + conditions: Conditions, + batch_size: int = None): + """ + Args: + net (Union[torch.nn.Module, torch.Tensor]): neural network or torch.Tensor for mode *mat* + grid (Domain): object of class Domain + equation (Equation): object of class Equation + conditions (Conditions): object of class Conditions + batch_size (int): size of batch + """ + self.net = net + self.domain = domain + self.equation = equation + self.conditions = conditions + + self._check = None + temp_dir = tempfile.gettempdir() + folder_path = os.path.join(temp_dir, 'tedeous_cache/') + if os.path.exists(folder_path) and os.path.isdir(folder_path): + pass + else: + os.makedirs(folder_path) + self._save_dir = folder_path + self.batch_size = batch_size + + def compile( + self, + mode: str, + lambda_operator: Union[List[float], float], + lambda_bound: Union[List[float], float], + normalized_loss_stop: bool = False, + h: float = 0.001, + inner_order: str = '1', + boundary_order: str = '2', + 
            derivative_points: int = 2,
+            weak_form: List[callable] = None,
+            tol: float = 0):
+        """ Compile the model for the training process.
+
+        Args:
+            mode (str): *mat, NN, autograd*
+            lambda_operator (Union[List[float], float]): weight for the operator term.
+                It can be a float for a single equation or a list of floats for a system.
+            lambda_bound (Union[List[float], float]): weight for the boundary term.
+                It can be a float for all types of boundary conditions or a list of floats with one weight per condition type.
+            normalized_loss_stop (bool, optional): loss with lambdas=1. Defaults to False.
+            h (float, optional): increment for the finite-difference scheme, only for *NN*. Defaults to 0.001.
+            inner_order (str, optional): order of the finite-difference scheme *'1', '2'* for inner points.
+                Only for *NN*. Defaults to '1'.
+            boundary_order (str, optional): order of the finite-difference scheme *'1', '2'* for boundary points.
+                Only for *NN*. Defaults to '2'.
+            derivative_points (int, optional): number of points for the finite-difference scheme in *mat* mode.
+                If derivative_points=2, the central scheme is used. Defaults to 2.
+            weak_form (List[callable], optional): basis functions for the weak loss. Defaults to None.
+            tol (float, optional): tolerance for the causal loss. Defaults to 0.
+        """
+        self.mode = mode
+        self.lambda_bound = lambda_bound
+        self.lambda_operator = lambda_operator
+        self.normalized_loss_stop = normalized_loss_stop
+        self.weak_form = weak_form
+
+        grid = self.domain.build(mode=mode)
+        dtype = grid.dtype
+        self.net.to(dtype)
+        variable_dict = self.domain.variable_dict
+        operator = self.equation.equation_lst
+        bconds = self.conditions.build(variable_dict)
+
+        self.equation_cls = Operator_bcond_preproc(grid, operator, bconds, h=h, inner_order=inner_order,
+                                                   boundary_order=boundary_order).set_strategy(mode)
+        if self.batch_size is not None:
+            if len(grid)
 torch.Tensor:
-        """
-        Forward method for Fourier features generation.
+        """ Forward method for Fourier features generation.

         Args:
-            grid: calculation domain.
+            grid (torch.Tensor): calculation domain.
+
         Returns:
-            out: embedding with Fourier features.
+            torch.Tensor: embedding with Fourier features.
         """
+
         if self.idx == []:
             out = grid
         else:
             out = grid[:, self.idx]
-        for i in range(len(self.M)):
+        for i, _ in enumerate(self.M):
             if self.M[i] is not None:
                 Mi = self.M[i]
                 Li = self.L[i]
                 w = 2.0 * np.pi / Li
-                k = torch.arange(1, Mi + 1).reshape(-1, 1).float()
+                k = torch.arange(1, Mi + 1, device=self._device).reshape(-1, 1).float()
                 x = grid[:, i].reshape(1, -1)
                 x = (k @ x).T
                 embed_cos = torch.cos(w * x)
@@ -70,26 +77,21 @@ class FourierNN(nn.Module):
     """
     Class for realizing neural network with Fourier features
     and skip connection.
-
-    Args:
-        L: list[float or None], sin(w*x)/cos(w*x) frequency parameter, w = 2*pi/L.
-        M: list[float or None], number of (sin, cos) pairs in result embedding.
-        activation: nn.Module object, activation function.
-        ones: bool, enter or not ones vector in result embedding.
     """

     def __init__(self, layers=[100, 100, 100, 1], L=[1], M=[1],
                  activation=nn.Tanh(), ones=False):
         """
-        Class for realizing neural network with Fourier features
-        and skip connection.
-
-        Args:
-            L: list[float or None], sin(w*x)/cos(w*x) frequency parameter, w = 2*pi/L.
-            M: list[float or None], number of (sin, cos) pairs in result embedding.
-            activation: nn.Module object, activation function.
-            ones: bool, enter or not ones vector in result embedding.
- """ + + Args: + layers (list, optional): neurons quantity in each layer (exclusion input layer), + the number of neurons in the hidden layers must match. Defaults to [100, 100, 100, 1]. + L (list, optional): (sin(w*x),cos(w*x)) frequency parameter, w=2*pi/L. Defaults to [1]. + M (list, optional): number of (sin, cos) pairs in result embedding. Defaults to [1]. + activation (_type_, optional): nn.Module object, activ-n function. Defaults to nn.Tanh(). + ones (bool, optional): enter or not ones vector in result embedding. Defaults to False. + """ + super(FourierNN, self).__init__() self.L = L self.M = M @@ -106,70 +108,121 @@ def __init__(self, layers=[100, 100, 100, 1], L=[1], M=[1], self.model.append(nn.Linear(layers[i], layers[i + 1])) def forward(self, grid: torch.Tensor) -> torch.Tensor: - """ - Forward pass for neural network. + """ Forward pass for neural network. Args: - grid: calculation domain. - Returns: - predicted values. + grid (torch.Tensor): calculation domain. + Returns: + torch.Tensor: predicted values. """ - ann_input = self.model[0](grid) - V = self.activation(self.linear_v(ann_input)) - U = self.activation(self.linear_u(ann_input)) + + input_ = self.model[0](grid) + v = self.activation(self.linear_v(input_)) + u = self.activation(self.linear_u(input_)) for layer in self.model[1:-1]: - output = self.activation(layer(ann_input)) - ann_input = output * U + (1 - output) * V + output = self.activation(layer(input_)) + input_ = output * u + (1 - output) * v - output = self.model[-1](ann_input) + output = self.model[-1](input_) return output class FeedForward(nn.Module): - def __init__(self, layers, activation, parameters=None): - super(FeedForward, self).__init__() - model = [] - for i in range(len(layers)-2): - model.append(nn.Linear(layers[i], layers[i+1])) - model.append(activation) - model.append(nn.Linear(layers[-2], layers[-1])) - self.net = torch.nn.Sequential(*model) + """Simple MLP neural network""" + + def __init__(self, + layers: List = [2, 100, 100, 100, 1], + activation: nn.Module = nn.Tanh(), + parameters: dict = None): + """ + Args: + layers (List, optional): neurons quantity in each layer. + Defaults to [2, 100, 100, 100, 1]. + activation (nn.Module, optional): nn.Module object, activ-n function. + Defaults to nn.Tanh(). + parameters (dict, optional): parameters initial values (for inverse task). + Defaults to None. + """ + + super().__init__() + self.model = [] + + for i in range(len(layers) - 2): + self.model.append(nn.Linear(layers[i], layers[i + 1])) + self.model.append(activation) + self.model.append(nn.Linear(layers[-2], layers[-1])) + self.net = torch.nn.Sequential(*self.model) if parameters is not None: self.reg_param(parameters) - def forward(self, x): + def forward(self, x: torch.Tensor) -> torch.Tensor: + """ forward run + + Args: + x (torch.Tensor): neural network inputs + + Returns: + torch.Tensor: outputs + """ return self.net(x) - def reg_param(self, parameters: dict): + def reg_param(self, + parameters: dict): + """ Parameters registration as neural network parameters. + Should be used in inverse coefficients tasks. + + Args: + parameters (dict): dict with initial values. 
+ """ for key, value in parameters.items(): parameters[key] = torch.nn.Parameter(torch.tensor([value], - requires_grad=True).float()) + requires_grad=True).float()) self.net.register_parameter(key, parameters[key]) -def parameter_registr(model, parameters): +def parameter_registr(model: torch.nn.Module, + parameters: dict) -> None: + """Parameters registration as neural network (mpdel) parameters. + Should be used in inverse coefficients tasks. + + Args: + model (torch.nn.Module): neural network. + parameters (dict): dict with initial values. + """ for key, value in parameters.items(): parameters[key] = torch.nn.Parameter(torch.tensor([value], - requires_grad=True).float()) + requires_grad=True).float()) model.register_parameter(key, parameters[key]) -def mat_model(grid, equation, nn_model=None): - if type(equation) is list: - eq_num = len(equation) - else: - eq_num = 1 +def mat_model(domain: Any, + equation: Any, + nn_model: torch.nn.Module = None) -> torch.Tensor: + """ Model creation for *mat* mode. + + Args: + domain (Any): object of Domian class. + equation (Any): Equation class object (see data module). + nn_model (torch.nn.Module, optional): neural network which outputs will be *mat* model. + Defaults to None. + + Returns: + torch.nn.Module: model for *mat* mode. + """ + + grid = domain.build('mat') + + eq_num = len(equation.equation_lst) shape = [eq_num] + list(grid.shape)[1:] - if nn_model != None: - nn_grid = torch.vstack([grid[i].reshape(-1) for i in \ - range(grid.shape[0])]).T.float() + if nn_model is not None: + nn_grid = torch.vstack([grid[i].reshape(-1) for i in range(grid.shape[0])]).T.float() model = nn_model(nn_grid).detach() model = model.reshape(shape) else: model = torch.ones(shape) - return model \ No newline at end of file + return model diff --git a/epde/solver/optimizers/__init__.py b/epde/solver/optimizers/__init__.py new file mode 100644 index 0000000..2a9eb61 --- /dev/null +++ b/epde/solver/optimizers/__init__.py @@ -0,0 +1 @@ +from epde.solver.optimizers.optimizer import Optimizer \ No newline at end of file diff --git a/epde/solver/optimizers/closure.py b/epde/solver/optimizers/closure.py new file mode 100644 index 0000000..c354f14 --- /dev/null +++ b/epde/solver/optimizers/closure.py @@ -0,0 +1,147 @@ +import torch +from typing import Any +from epde.solver.device import device_type + +class Closure(): + def __init__(self, + mixed_precision: bool, + model): + + self.mixed_precision = mixed_precision + self.set_model(model) + self.optimizer = self.model.optimizer + self.normalized_loss_stop = self.model.normalized_loss_stop + self.device = device_type() + self.cuda_flag = True if self.device == 'cuda' and self.mixed_precision else False + self.dtype = torch.float16 if self.device == 'cuda' else torch.bfloat16 + if self.mixed_precision: + self._amp_mixed() + + + def set_model(self, model): + self._model = model + + @property + def model(self): + return self._model + + def _amp_mixed(self): + """ Preparation for mixed precsion operations. + + Args: + mixed_precision (bool): use or not torch.amp. + + Raises: + NotImplementedError: AMP and the LBFGS optimizer are not compatible. + + Returns: + scaler: GradScaler for CUDA. + cuda_flag (bool): True, if CUDA is activated and mixed_precision=True. + dtype (dtype): operations dtype. + """ + + self.scaler = torch.cuda.amp.GradScaler(enabled=self.mixed_precision) + if self.mixed_precision: + print(f'Mixed precision enabled. 
The device is {self.device}') + if self.optimizer.__class__.__name__ == "LBFGS": + raise NotImplementedError("AMP and the LBFGS optimizer are not compatible.") + + + def _closure(self): + self.optimizer.zero_grad() + with torch.autocast(device_type=self.device, + dtype=self.dtype, + enabled=self.mixed_precision): + loss, loss_normalized = self.model.solution_cls.evaluate() + if self.cuda_flag: + self.scaler.scale(loss).backward() + self.scaler.step(self.optimizer) + self.scaler.update() + else: + loss.backward() + + self.model.cur_loss = loss_normalized if self.normalized_loss_stop else loss + + return loss + + def _closure_nncg(self): + self.optimizer.zero_grad() + with torch.autocast(device_type=self.device, + dtype=self.dtype, + enabled=self.mixed_precision): + loss, loss_normalized = self.model.solution_cls.evaluate() + + # if self.optimizer.use_grad: + grads = self.optimizer.gradient(loss) + grads = torch.where(grads != grads, torch.zeros_like(grads), grads) + # else: + # grads = torch.tensor([0.]) + # if self.cuda_flag: + # self.scaler.scale(loss).backward() + # self.scaler.step(self.optimizer) + # self.scaler.update() + # else: + # loss.backward() + self.model.cur_loss = loss_normalized if self.normalized_loss_stop else loss + return loss, grads + + def _closure_pso(self): + def loss_grads(): + self.optimizer.zero_grad() + with torch.autocast(device_type=self.device, + dtype=self.dtype, + enabled=self.mixed_precision): + loss, loss_normalized = self.model.solution_cls.evaluate() + + if self.optimizer.use_grad: + grads = self.optimizer.gradient(loss) + grads = torch.where(grads == float('nan'), torch.zeros_like(grads), grads) + else: + grads = torch.tensor([0.]) + + return loss, grads + + loss_swarm = [] + grads_swarm = [] + for particle in self.optimizer.swarm: + self.optimizer.vec_to_params(particle) + loss_particle, grads = loss_grads() + loss_swarm.append(loss_particle) + grads_swarm.append(grads.reshape(1, -1)) + + losses = torch.stack(loss_swarm).reshape(-1) + gradients = torch.vstack(grads_swarm) + + self.model.cur_loss = min(loss_swarm) + + return losses, gradients + + def _closure_ngd(self): + self.optimizer.zero_grad() + with torch.autocast(device_type=self.device, + dtype=self.dtype, + enabled=self.mixed_precision): + loss, loss_normalized = self.model.solution_cls.evaluate() + if self.cuda_flag: + self.scaler.scale(loss).backward(retain_graph=True) + self.scaler.step(self.optimizer) + self.scaler.update() + else: + loss.backward(retain_graph=True) + + self.model.cur_loss = loss_normalized if self.normalized_loss_stop else loss + + int_res = self.model.solution_cls.operator._pde_compute() + bval, true_bval, _, _ = self.model.solution_cls.boundary.apply_bcs() + + return int_res, bval, true_bval, loss, self.model.solution_cls.evaluate + + def get_closure(self, _type: str): + if _type == 'PSO': + return self._closure_pso + elif _type == 'NGD': + return self._closure_ngd + elif _type == 'NNCG': + return self._closure_nncg + else: + return self._closure diff --git a/epde/solver/optimizers/ngd.py b/epde/solver/optimizers/ngd.py new file mode 100644 index 0000000..c4774a5 --- /dev/null +++ b/epde/solver/optimizers/ngd.py @@ -0,0 +1,212 @@ +import torch +import numpy as np +from torch.nn.utils import parameters_to_vector, vector_to_parameters +from epde.solver.utils import replace_none_by_zero +from epde.solver.device import check_device + + +class NGD(torch.optim.Optimizer): + + """NGD implementation (https://arxiv.org/abs/2302.13163). 
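In outline, a natural-gradient step replaces the raw gradient by the solution of G d = grad and then line-searches over step sizes 0.5**k; a toy sketch with hypothetical dimensions (not this class's API):

import torch

n_params = 8
G = torch.randn(n_params, n_params)
G = G @ G.T + 1e-3 * torch.eye(n_params)   # symmetric positive-definite stand-in for the Gram matrix
grad = torch.randn(n_params)
d = torch.linalg.lstsq(G, grad.unsqueeze(-1)).solution.squeeze(-1)
steps = 0.5 ** torch.arange(31)            # candidate step sizes, as in grid_line_search_update
# the step with the smallest trial loss is then applied to the parameter vector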
+ """ + + def __init__(self, params, + grid_steps_number: int = 30): + """The Natural Gradient Descent class. + + Args: + grid_steps_number (int, optional): Grid steps number. Defaults to 30. + """ + defaults = {'grid_steps_number': grid_steps_number} + super(NGD, self).__init__(params, defaults) + self.params = self.param_groups[0]['params'] + self.grid_steps_number = grid_steps_number + self.grid_steps = torch.linspace(0, self.grid_steps_number, self.grid_steps_number + 1) + self.steps = 0.5**self.grid_steps + self.cuda_out_of_memory_flag=False + self.cuda_empty_once_for_test=True + + def grid_line_search_update(self, loss_function: callable, f_nat_grad: torch.Tensor) -> None: + """ Update models paramters by natural gradient. + + Args: + loss (callable): function to calculate loss. + + Returns: + None. + """ + # function to update models paramters at each step + def loss_at_step(step, loss_function: callable, f_nat_grad: torch.Tensor) -> torch.Tensor: + params = parameters_to_vector(self.params) + new_params = params - step * f_nat_grad + vector_to_parameters(new_params, self.params) + loss_val, _ = loss_function() + vector_to_parameters(params, self.params) + return loss_val + + losses = [] + for step in self.steps: + losses.append(loss_at_step(step, loss_function, f_nat_grad).reshape(1)) + losses = torch.cat(losses) + step_size = self.steps[torch.argmin(losses)] + + params = parameters_to_vector(self.params) + new_params = params - step_size * f_nat_grad + vector_to_parameters(new_params, self.params) + + def gram_factory(self, residuals: torch.Tensor) -> torch.Tensor: + """ Make Gram matrice. + + Args: + residuals (callable): PDE residual. + + Returns: + torch.Tensor: Gram matrice. + """ + # Make Gram matrice. + def jacobian() -> torch.Tensor: + jac = [] + for l in residuals: + j = torch.autograd.grad(l, self.params, retain_graph=True, allow_unused=True) + j = replace_none_by_zero(j) + j = parameters_to_vector(j).reshape(1, -1) + jac.append(j) + return torch.cat(jac) + + J = jacobian() + return 1.0 / len(residuals) * J.T @ J + + + def gram_factory_cpu(self, residuals: torch.Tensor) -> torch.Tensor: + """ Make Gram matrice. + + Args: + residuals (callable): PDE residual. + + Returns: + torch.Tensor: Gram matrice. + """ + # Make Gram matrice. + def jacobian() -> torch.Tensor: + jac = [] + for l in residuals: + j = torch.autograd.grad(l, self.params, retain_graph=True, allow_unused=True) + j = replace_none_by_zero(j) + j = parameters_to_vector(j).reshape(1, -1) + jac.append(j) + return torch.cat(jac) + + J = jacobian().cpu() + return 1.0 / len(residuals) * J.T @ J + + + + def torch_cuda_lstsq(self, A: torch.Tensor, B: torch.Tensor, tol: float = None) -> torch.Tensor: + """ Find lstsq (least-squares solution) for torch.tensor cuda. + + Args: + A (torch.Tensor): lhs tensor of shape (*, m, n) where * is zero or more batch dimensions. + B (torch.Tensor): rhs tensor of shape (*, m, k) where * is zero or more batch dimensions. + tol (float): used to determine the effective rank of A. By default set to the machine precision of the dtype of A. + + Returns: + torch.Tensor: solution for A and B. 
+ """ + tol = torch.finfo(A.dtype).eps if tol is None else tol + U, S, Vh = torch.linalg.svd(A, full_matrices=False) + Spinv = torch.zeros_like(S) + Spinv[S>tol] = 1/S[S>tol] + UhB = U.adjoint() @ B + if Spinv.ndim!=UhB.ndim: + Spinv = Spinv.unsqueeze(-1) + SpinvUhB = Spinv * UhB + return Vh.adjoint() @ SpinvUhB + + + + def numpy_lstsq(self, A: torch.Tensor, B: torch.Tensor, rcond: float = None) -> torch.Tensor: + + A = A.detach().cpu().numpy() + B = B.detach().cpu().numpy() + + f_nat_grad = np.linalg.lstsq(A, B,rcond=rcond)[0] + + f_nat_grad=torch.from_numpy(f_nat_grad) + + f_nat_grad = check_device(f_nat_grad) + + return f_nat_grad + + + def step(self, closure=None) -> torch.Tensor: + """ It runs ONE step on the natural gradient descent. + + Returns: + torch.Tensor: loss value for NGD step. + """ + + int_res, bval, true_bval, loss, loss_function = closure() + grads = torch.autograd.grad(loss, self.params, retain_graph=True, allow_unused=True) + grads = replace_none_by_zero(grads) + f_grads = parameters_to_vector(grads) + + bound_res = bval-true_bval + + ## assemble gramian + #G_int = self.gram_factory(int_res.reshape(-1)) + #G_bdry = self.gram_factory(bound_res.reshape(-1)) + #G = G_int + G_bdry + + ## Marquardt-Levenberg + #Id = torch.eye(len(G)) + #G = torch.min(torch.tensor([loss, 0.0])) * Id + G + + + + # compute natural gradient + if not self.cuda_out_of_memory_flag: + try: + if self.cuda_empty_once_for_test: + #print('Initial GPU check') + torch.cuda.empty_cache() + self.cuda_empty_once_for_test=False + + # assemble gramian + + #print('NGD GPU step') + + G_int = self.gram_factory(int_res.reshape(-1)) + G_bdry = self.gram_factory(bound_res.reshape(-1)) + G = G_int + G_bdry + + # Marquardt-Levenberg + Id = torch.eye(len(G)) + G = torch.min(torch.tensor([loss, 0.0])) * Id + G + + f_nat_grad = self.torch_cuda_lstsq(G, f_grads) + except torch.OutOfMemoryError: + print('[Warning] Least square returned CUDA out of memory error, CPU and RAM are used, which is significantly slower') + self.cuda_out_of_memory_flag=True + + G_int = self.gram_factory_cpu(int_res.reshape(-1).cpu()) + G_bdry = self.gram_factory_cpu(bound_res.reshape(-1).cpu()) + G = G_int + G_bdry + + + f_nat_grad = self.numpy_lstsq(G, f_grads) + else: + + + #print('NGD CPU step') + + G_int = self.gram_factory_cpu(int_res.reshape(-1).cpu()) + G_bdry = self.gram_factory_cpu(bound_res.reshape(-1).cpu()) + G = G_int + G_bdry + + f_nat_grad = self.numpy_lstsq(G, f_grads) + + # one step of NGD + self.grid_line_search_update(loss_function, f_nat_grad) + self.param_groups[0]['params'] = self.params + + return loss \ No newline at end of file diff --git a/epde/solver/optimizers/nys_newton_cg.py b/epde/solver/optimizers/nys_newton_cg.py new file mode 100644 index 0000000..d6d7e81 --- /dev/null +++ b/epde/solver/optimizers/nys_newton_cg.py @@ -0,0 +1,261 @@ +import torch +from torch.optim import Optimizer +from torch.func import vmap +from functools import reduce +from torch.nn.utils import parameters_to_vector + +def _armijo(f, x, gx, dx, t, alpha=0.1, beta=0.5): + """Line search to find a step size that satisfies the Armijo condition.""" + f0 = f(x, 0, dx) + f1 = f(x, t, dx) + while f1 > f0 + alpha * t * gx.dot(dx): + t *= beta + f1 = f(x, t, dx) + return t + +def _apply_nys_precond_inv(U, S_mu_inv, mu, lambd_r, x): + """Applies the inverse of the Nystrom approximation of the Hessian to a vector.""" + z = U.T @ x + z = (lambd_r + mu) * (U @ (S_mu_inv * z)) + (x - U @ z) + return z + +def _nystrom_pcg(hess, b, x, mu, U, S, r, tol, 
max_iters): + """Solves a positive-definite linear system using NyströmPCG. + + `Frangella et al. Randomized Nyström Preconditioning. + SIAM Journal on Matrix Analysis and Applications, 2023. + `""" + lambd_r = S[r - 1] + S_mu_inv = (S + mu) ** (-1) + + resid = b - (hess(x) + mu * x) + with torch.no_grad(): + z = _apply_nys_precond_inv(U, S_mu_inv, mu, lambd_r, resid) + p = z.clone() + + i = 0 + + while torch.norm(resid) > tol and i < max_iters: + v = hess(p) + mu * p + with torch.no_grad(): + alpha = torch.dot(resid, z) / torch.dot(p, v) + x += alpha * p + + rTz = torch.dot(resid, z) + resid -= alpha * v + z = _apply_nys_precond_inv(U, S_mu_inv, mu, lambd_r, resid) + beta = torch.dot(resid, z) / rTz + + p = z + beta * p + + i += 1 + + if torch.norm(resid) > tol: + print(f"Warning: PCG did not converge to tolerance. Tolerance was {tol} but norm of residual is {torch.norm(resid)}") + + return x + +class NysNewtonCG(Optimizer): + """Implementation of NysNewtonCG, a damped Newton-CG method that uses Nyström preconditioning. + + `Rathore et al. Challenges in Training PINNs: A Loss Landscape Perspective. + Preprint, 2024. ` + + .. warning:: + This optimizer doesn't support per-parameter options and parameter + groups (there can be only one). + + NOTE: This optimizer is currently a beta version. + + Our implementation is inspired by the PyTorch implementation of `L-BFGS + `. + + The parameters rank and mu will probably need to be tuned for your specific problem. + + If the optimizer is running very slowly, you can try one of the following: + - Increase the rank (this should increase the accuracy of the Nyström approximation in PCG) + - Reduce cg_tol (this will allow PCG to terminate with a less accurate solution) + - Reduce cg_max_iters (this will allow PCG to terminate after fewer iterations) + + Args: + params (iterable): iterable of parameters to optimize or dicts defining + parameter groups + lr (float, optional): learning rate (default: 1.0) + rank (int, optional): rank of the Nyström approximation (default: 10) + mu (float, optional): damping parameter (default: 1e-4) + chunk_size (int, optional): number of Hessian-vector products to be computed in parallel (default: 1) + cg_tol (float, optional): tolerance for PCG (default: 1e-16) + cg_max_iters (int, optional): maximum number of PCG iterations (default: 1000) + line_search_fn (str, optional): either 'armijo' or None (default: None) + verbose (bool, optional): verbosity (default: False) + + """ + def __init__(self, params, lr=1.0, rank=10, mu=1e-4, chunk_size=1, + cg_tol=1e-16, cg_max_iters=1000, line_search_fn=None, verbose=False): + defaults = dict(lr=lr, rank=rank, chunk_size=chunk_size, mu=mu, cg_tol=cg_tol, + cg_max_iters=cg_max_iters, line_search_fn=line_search_fn) + self.rank = rank + self.mu = mu + self.chunk_size = chunk_size + self.cg_tol = cg_tol + self.cg_max_iters = cg_max_iters + self.line_search_fn = line_search_fn + self.verbose = verbose + self.U = None + self.S = None + self.n_iters = 0 + super(NysNewtonCG, self).__init__(params, defaults) + + if len(self.param_groups) > 1: + raise ValueError( + "NysNewtonCG doesn't currently support per-parameter options (parameter groups)") + if self.line_search_fn is not None and self.line_search_fn != 'armijo': + raise ValueError("NysNewtonCG only supports Armijo line search") + self._params = self.param_groups[0]['params'] + self._params_list = list(self._params) + self._numel_cache = None + + def gradient(self, loss: torch.Tensor) -> torch.Tensor: + """ Calculation of loss gradient 
by model parameters (NN, autograd) + or model values (mat). + Args: + loss (torch.Tensor): result of loss calculation. + Returns: + torch.Tensor: calculated gradient vector. + """ + dl_dparam = torch.autograd.grad(loss, self._params, create_graph=True) + grads = parameters_to_vector(dl_dparam) + return grads + + def step(self, closure=None): + """Perform a single optimization step. + Args: + closure (callable, optional): A closure that reevaluates the model and returns (i) the loss and (ii) gradient w.r.t. the parameters. + The closure can compute the gradient w.r.t. the parameters by calling torch.autograd.grad on the loss with create_graph=True. + """ + if self.n_iters == 0: + # Store the previous direction for warm starting PCG + self.old_dir = torch.zeros( + self._numel(), device=self._params[0].device) + # NOTE: The closure must return both the loss and the gradient + loss = None + if closure is not None: + with torch.enable_grad(): + loss, grad_tuple = closure() + g = torch.cat([grad.view(-1) for grad in grad_tuple if grad is not None]) + # One step update + for group_idx, group in enumerate(self.param_groups): + def hvp_temp(x): + return self._hvp(g, self._params_list, x) + # Calculate the Newton direction + d = _nystrom_pcg(hvp_temp, g, self.old_dir, + self.mu, self.U, self.S, self.rank, self.cg_tol, self.cg_max_iters) + # Store the previous direction for warm starting PCG + self.old_dir = d + # Check if d is a descent direction + if torch.dot(d, g) <= 0: + print("Warning: d is not a descent direction") + + if self.line_search_fn == 'armijo': + x_init = self._clone_param() + def obj_func(x, t, dx): + self._add_grad(t, dx) + loss = float(closure()[0]) + self._set_param(x) + return loss + # Use -d for convention + t = _armijo(obj_func, x_init, g, -d, group['lr']) + else: + t = group['lr'] + self.state[group_idx]['t'] = t + # update parameters + ls = 0 + for p in group['params']: + np = torch.numel(p) + dp = d[ls:ls+np].view(p.shape) + ls += np + p.data.add_(-dp, alpha=t) + self.n_iters += 1 + return loss, g + + def update_preconditioner(self, grad_tuple): + """Update the Nystrom approximation of the Hessian. + Args: + grad_tuple (tuple): tuple of Tensors containing the gradients of the loss w.r.t. the parameters. + This tuple can be obtained by calling torch.autograd.grad on the loss with create_graph=True. 
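The preconditioner update below relies on Hessian-vector products; the standard double-backward trick it uses, shown in isolation (toy loss, hypothetical names):

import torch

w = torch.randn(4, requires_grad=True)
loss = (w**2).sum() + w[0] * w[1]
g = torch.autograd.grad(loss, w, create_graph=True)[0]   # gradient with graph kept
v = torch.randn(4)
Hv = torch.autograd.grad(g, w, grad_outputs=v, retain_graph=True)[0]  # H @ v without materializing H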
+ """ + # Flatten and concatenate the gradients + gradsH = torch.cat([gradient.view(-1) + for gradient in grad_tuple if gradient is not None]) + # Generate test matrix (NOTE: This is transposed test matrix) + p = gradsH.shape[0] + Phi = torch.randn( + (self.rank, p), device=gradsH.device) / (p ** 0.5) + Phi = torch.linalg.qr(Phi.t(), mode='reduced')[0].t() + Y = self._hvp_vmap(gradsH, self._params_list)(Phi) + # Calculate shift + shift = torch.finfo(Y.dtype).eps + Y_shifted = Y + shift * Phi + # Calculate Phi^T * H * Phi (w/ shift) for Cholesky + choleskytarget = torch.mm(Y_shifted, Phi.t()) + # Perform Cholesky, if fails, do eigendecomposition + # The new shift is the abs of smallest eigenvalue (negative) plus the original shift + try: + C = torch.linalg.cholesky(choleskytarget) + except torch._C._LinAlgError: + # eigendecomposition, eigenvalues and eigenvector matrix + eigs, eigvectors = torch.linalg.eigh(choleskytarget) + shift = shift + torch.abs(torch.min(eigs)) + # add shift to eigenvalues + eigs = eigs + shift + # put back the matrix for Cholesky by eigenvector * eigenvalues after shift * eigenvector^T + C = torch.linalg.cholesky( + torch.mm(eigvectors, torch.mm(torch.diag(eigs), eigvectors.T))) + try: + B = torch.linalg.solve_triangular( + C, Y_shifted, upper=False, left=True) + # temporary fix for issue @ https://github.com/pytorch/pytorch/issues/97211 + except: + B = torch.linalg.solve_triangular(C.to('cpu'), Y_shifted.to( + 'cpu'), upper=False, left=True).to(C.device) + + # B = V * S * U^T b/c we have been using transposed sketch + _, S, UT = torch.linalg.svd(B, full_matrices=False) + self.U = UT.t() + self.S = torch.max(torch.square(S) - shift, torch.tensor(0.0)) + self.rho = self.S[-1] + if self.verbose: + print(f'Approximate eigenvalues = {self.S}') + + def _hvp_vmap(self, grad_params, params): + return vmap(lambda v: self._hvp(grad_params, params, v), in_dims=0, chunk_size=self.chunk_size) + + def _hvp(self, grad_params, params, v): + Hv = torch.autograd.grad(grad_params, params, grad_outputs=v, + retain_graph=True) + Hv = tuple(Hvi.detach() for Hvi in Hv) + return torch.cat([Hvi.reshape(-1) for Hvi in Hv]) + + def _numel(self): + if self._numel_cache is None: + self._numel_cache = reduce( + lambda total, p: total + p.numel(), self._params, 0) + return self._numel_cache + + def _add_grad(self, step_size, update): + offset = 0 + for p in self._params: + numel = p.numel() + # Avoid in-place operation by creating a new tensor + p.data = p.data.add( + update[offset:offset + numel].view_as(p), alpha=step_size) + offset += numel + assert offset == self._numel() + + def _clone_param(self): + return [p.clone(memory_format=torch.contiguous_format) for p in self._params] + + def _set_param(self, params_data): + for p, pdata in zip(self._params, params_data): + # Replace the .data attribute of the tensor + p.data = pdata.data \ No newline at end of file diff --git a/epde/solver/optimizers/optimizer.py b/epde/solver/optimizers/optimizer.py new file mode 100644 index 0000000..07e1a7f --- /dev/null +++ b/epde/solver/optimizers/optimizer.py @@ -0,0 +1,61 @@ +import torch +from abc import ABC +from typing import Union, Any +from epde.solver.optimizers.pso import PSO +from epde.solver.optimizers.ngd import NGD +from epde.solver.optimizers.nys_newton_cg import NysNewtonCG +from torch.optim.lr_scheduler import ExponentialLR + + +class Optimizer(): + def __init__( + self, + optimizer: str, + params: dict, + gamma: Union[float, None]=None, + decay_every: Union[int, None]=None): + self.optimizer 
= optimizer
+        self.params = params
+        self.gamma = gamma
+        self.decay_every = decay_every
+
+    def optimizer_choice(
+        self,
+        mode,
+        model) -> \
+            Union[torch.optim.Adam, torch.optim.SGD, torch.optim.LBFGS, PSO]:
+        """ Sets the optimizer. If the optimizer was given as a string, it gets
+        default settings; otherwise it may be a custom optimizer defined by the user.
+
+        Args:
+            mode (str): calculation mode, *NN, autograd, mat*.
+            model: model parameters (*NN, autograd*) or model tensor (*mat*) to optimize.
+
+        Returns:
+            optimizer: ready optimizer.
+        """
+
+        if self.optimizer == 'Adam':
+            torch_optim = torch.optim.Adam
+        elif self.optimizer == 'SGD':
+            torch_optim = torch.optim.SGD
+        elif self.optimizer == 'LBFGS':
+            torch_optim = torch.optim.LBFGS
+        elif self.optimizer == 'PSO':
+            torch_optim = PSO
+        elif self.optimizer == 'NGD':
+            torch_optim = NGD
+        elif self.optimizer == 'NNCG':
+            torch_optim = NysNewtonCG
+
+
+        if mode in ('NN', 'autograd'):
+            optimizer = torch_optim(model.parameters(), **self.params)
+        elif mode == 'mat':
+            optimizer = torch_optim([model.requires_grad_()], **self.params)
+
+        if self.gamma is not None:
+            self.scheduler = ExponentialLR(optimizer, gamma=self.gamma)
+
+        return optimizer
\ No newline at end of file
diff --git a/epde/solver/optimizers/pso.py b/epde/solver/optimizers/pso.py
new file mode 100644
index 0000000..b0300e4
--- /dev/null
+++ b/epde/solver/optimizers/pso.py
@@ -0,0 +1,223 @@
+from typing import Tuple
+import torch
+from copy import copy
+import numpy as np
+from torch.nn.utils import parameters_to_vector, vector_to_parameters
+from epde.solver.device import device_type
+
+
+class PSO(torch.optim.Optimizer):
+
+    """Custom PSO optimizer.
+    """
+
+    def __init__(self,
+                 params,
+                 pop_size: int = 30,
+                 b: float = 0.9,
+                 c1: float = 8e-2,
+                 c2: float = 5e-1,
+                 lr: float = 1e-3,
+                 betas: Tuple = (0.99, 0.999),
+                 c_decrease: bool = False,
+                 variance: float = 1,
+                 epsilon: float = 1e-8,
+                 n_iter: int = 2000):
+        """The Particle Swarm Optimizer class.
+
+        Args:
+            pop_size (int, optional): Population of the PSO swarm. Defaults to 30.
+            b (float, optional): Inertia of the particles. Defaults to 0.9.
+            c1 (float, optional): The *p-best* coefficient. Defaults to 0.08.
+            c2 (float, optional): The *g-best* coefficient. Defaults to 0.5.
+            lr (float, optional): Learning rate for gradient descent; with lr=0
+                there is no gradient-based optimization. Defaults to 1e-3.
+            betas (tuple(float, float), optional): the same coefficients as in the Adam algorithm.
+                Defaults to (0.99, 0.999).
+            c_decrease (bool, optional): Flag for the update_pso_params method. Defaults to False.
+            variance (float, optional): Variance parameter for swarm creation
+                based on the model. Defaults to 1.
+            epsilon (float, optional): additive constant in the gradient-descent update,
+                as in the Adam optimizer. Defaults to 1e-8.
+            n_iter (int, optional): number of optimization steps, used to schedule c1/c2
+                when c_decrease=True. Defaults to 2000.
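A hypothetical direct-usage sketch (in this codebase PSO is normally constructed through the Optimizer wrapper above, and the closure is assembled by Closure._closure_pso):

import torch.nn as nn

net = nn.Sequential(nn.Linear(1, 16), nn.Tanh(), nn.Linear(16, 1))
optimizer = PSO(net.parameters(), pop_size=20, lr=1e-3, variance=0.5)
# per iteration: min_loss = optimizer.step(closure), where closure() returns
# per-particle losses and gradients as (losses, grads_swarm)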
+ """ + defaults = {'pop_size': pop_size, + 'b': b, 'c1': c1, 'c2': c2, + 'lr': lr, 'betas': betas, + 'c_decrease': c_decrease, + 'variance': variance, + 'epsilon': epsilon} + super(PSO, self).__init__(params, defaults) + self.params = self.param_groups[0]['params'] + self.pop_size = pop_size + self.b = b + self.c1 = c1 + self.c2 = c2 + self.c_decrease = c_decrease + self.epsilon = epsilon + self.beta1, self.beta2 = betas + self.lr = lr * np.sqrt(1 - self.beta2) / (1 - self.beta1) + self.use_grad = True if self.lr != 0 else False + self.variance = variance + self.name = "PSO" + self.n_iter = n_iter + + vec_shape = self.params_to_vec().shape + self.vec_shape = list(vec_shape)[0] + + self.swarm = self.build_swarm() + + self.p = copy(self.swarm).detach() + + self.v = self.start_velocities() + self.m1 = torch.zeros(self.pop_size, self.vec_shape) + self.m2 = torch.zeros(self.pop_size, self.vec_shape) + + self.indicator = True + + def params_to_vec(self) -> torch.Tensor: + """ Method for converting model parameters *NN and autograd* + or model values *mat* to vector. + + Returns: + torch.Tensor: model parameters/model values vector. + """ + if not isinstance(self.params, torch.Tensor): + vec = parameters_to_vector(self.params) + else: + self.model_shape = self.params.shape + vec = self.params.reshape(-1) + + return vec + + def vec_to_params(self, vec: torch.Tensor) -> None: + """Method for converting vector to model parameters (NN, autograd) + or model values (mat) + + Args: + vec (torch.Tensor): The particle of swarm. + """ + if not isinstance(self.params, torch.Tensor): + vector_to_parameters(vec, self.params) + else: + self.params.data = vec.reshape(self.params).data + + def build_swarm(self): + """Creates the swarm based on solution class model. + + Returns: + torch.Tensor: The PSO swarm population. + Each particle represents a neural network (NN, autograd) or model values (mat). + """ + vector = self.params_to_vec() + matrix = [] + for _ in range(self.pop_size): + matrix.append(vector.reshape(1, -1)) + matrix = torch.cat(matrix) + variance = torch.FloatTensor(self.pop_size, self.vec_shape).uniform_( + -self.variance, self.variance).to(device_type()) + swarm = matrix + variance + swarm[0] = matrix[0] + return swarm.clone().detach().requires_grad_(True) + + def update_pso_params(self) -> None: + """Method for updating pso parameters if c_decrease=True. + """ + self.c1 -= 2 * self.c1 / self.n_iter + self.c2 += self.c2 / self.n_iter + + def start_velocities(self) -> torch.Tensor: + """Start the velocities of each particle in the population (swarm) as `0`. + + Returns: + torch.Tensor: The starting velocities. + """ + return torch.zeros((self.pop_size, self.vec_shape)) + + def gradient(self, loss: torch.Tensor) -> torch.Tensor: + """ Calculation of loss gradient by model parameters (NN, autograd) + or model values (mat). + + Args: + loss (torch.Tensor): result of loss calculation. + + Returns: + torch.Tensor: calculated gradient vector. + """ + dl_dparam = torch.autograd.grad(loss, self.params) + + grads = parameters_to_vector(dl_dparam) + + return grads + + def get_randoms(self) -> torch.Tensor: + """Generate random values to update the particles' positions. 
+ + Returns: + torch.Tensor: random tensor + """ + return torch.rand((2, 1, self.vec_shape)) + + def update_p_best(self) -> None: + """Updates the *p-best* positions.""" + + idx = torch.where(self.loss_swarm < self.f_p) + + self.p[idx] = self.swarm[idx] + self.f_p[idx] = self.loss_swarm[idx].detach() + + def update_g_best(self) -> None: + """Update the *g-best* position.""" + self.g_best = self.p[torch.argmin(self.f_p)] + + def gradient_descent(self) -> torch.Tensor: + """ Gradiend descent based on Adam algorithm. + + Returns: + torch.Tensor: gradient term in velocities vector. + """ + self.m1 = self.beta1 * self.m1 + (1 - self.beta1) * self.grads_swarm + self.m2 = self.beta2 * self.m2 + (1 - self.beta2) * torch.square( + self.grads_swarm) + + update = self.lr * self.m1 / (torch.sqrt(torch.abs(self.m2)) + self.epsilon) + + return update + + def step(self, closure=None) -> torch.Tensor: + """ It runs ONE step on the particle swarm optimization. + + Returns: + torch.Tensor: loss value for best particle of thw swarm. + """ + + self.loss_swarm, self.grads_swarm = closure() + + fix_attempt=0 + + while torch.any(self.loss_swarm!=self.loss_swarm): + self.swarm=self.swarm+0.001*torch.rand(size=self.swarm.shape) + self.loss_swarm, self.grads_swarm = closure() + fix_attempt+=1 + if fix_attempt>5: + break + + if self.indicator: + self.f_p = copy(self.loss_swarm).detach() + self.g_best = self.p[torch.argmin(self.f_p)] + self.indicator = False + + r1, r2 = self.get_randoms() + + self.v = self.b * self.v + (1 - self.b) * ( + self.c1 * r1 * (self.p - self.swarm) + self.c2 * r2 * (self.g_best - self.swarm)) + if self.use_grad: + self.swarm = self.swarm + self.v - self.gradient_descent() + else: + self.swarm = self.swarm + self.v + self.update_p_best() + self.update_g_best() + self.vec_to_params(self.g_best) + if self.c_decrease: + self.update_pso_params() + min_loss = torch.min(self.f_p) + + return min_loss diff --git a/epde/solver/points_type.py b/epde/solver/points_type.py index 6de1eae..0684677 100644 --- a/epde/solver/points_type.py +++ b/epde/solver/points_type.py @@ -1,67 +1,73 @@ -import numpy as np -import torch +"""Module for determine types of grid points. Only for *NN* mode.""" + + from typing import Union from scipy.spatial import Delaunay - +import numpy as np +import torch class Points_type(): """ Discretizing the grid and allocating subsets for Finite Difference method. """ - def __init__(self, grid): + def __init__(self, grid: torch.Tensor): + """ + Args: + grid (torch.Tensor): discretization points of comp-l domain. + """ + self.grid = grid @staticmethod def shift_points(grid: torch.Tensor, axis: int, shift: float) -> torch.Tensor: - """ - Shifts all values of an array 'grid' on a value 'shift' in a direction of + """ Shifts all values of an array 'grid' on a value 'shift' in a direction of axis 'axis', somewhat is equivalent to a np.roll. + Args: - grid: array of a n-D points. - axis: axis to which the shift is applied. - shift: shift value. + grid (torch.Tensor): discretization of comp-l domain. + axis (int): axis to which the shift is applied. + shift (float): shift value. + Returns: - shifted array of a n-D points. + torch.Tensor: shifted array of a n-D points. 
""" + grid_shift = grid.clone() grid_shift[:, axis] = grid[:, axis] + shift return grid_shift @staticmethod - def in_hull(p: torch.Tensor, hull: torch.Tensor) -> np.ndarray: - """ - Test if points in `p` are in `hull` + def _in_hull(p: torch.Tensor, hull: torch.Tensor) -> np.ndarray: + """ Test if points in `p` are in `hull` `p` should be a `NxK` coordinates of `N` points in `K` dimensions `hull` is either a scipy.spatial.Delaunay object or the `MxK` array of the coordinates of `M` points in `K`dimensions for which Delaunay triangulation will be computed. Args: - p: shifted array of a n-D points. - hull: initial array of a n-D points. + p (torch.Tensor): shifted array of a n-D points. + hull (torch.Tensor): initial array of a n-D points. Returns: - array of a n-D boolean type points. True - if 'p' in 'hull', False - otherwise. + np.ndarray: array of a n-D boolean type points. + True - if 'p' in 'hull', False - otherwise. """ + if p.shape[1] > 1: if not isinstance(hull, Delaunay): hull = Delaunay(hull.cpu()) return hull.find_simplex(p.cpu()) >= 0 elif p.shape[1] == 1: - """ - this one is not a snippet from a stackexchange it does the same - but for a 1-D case, which is not covered in a code above - """ + # this one is not a snippet from a stackexchange it does the same + # but for a 1-D case, which is not covered in a code above upbound = torch.max(hull).cpu() lowbound = torch.min(hull).cpu() return np.array(((p.cpu() <= upbound) & (p.cpu() >= lowbound)).reshape(-1)) def point_typization(self) -> dict: - """ - Allocating subsets for FD (i.e., 'f', 'b', 'central'). - Args: - grid: array of a n-D points. + """ Allocating subsets for FD (i.e., 'f', 'b', 'central'). + Returns: - type with a points in a 'grid' above. Type may be 'central' - inner point + dict: type with a points in a 'grid' above. Type may be 'central' - inner point and string of 'f' and 'b', where the length of the string is a dimension n. 'f' means that if we add small number to a position of corresponding coordinate we stay in the 'hull'. 'b' means that if we subtract small number from o a position of corresponding coordinate we stay in the 'hull'. @@ -71,7 +77,7 @@ def point_typization(self) -> dict: for axis in range(self.grid.shape[1]): for direction in range(2): direction_list.append( - Points_type.in_hull(Points_type.shift_points( + Points_type._in_hull(Points_type.shift_points( self.grid, axis, (-1) ** direction * 0.0001), self.grid)) direction_list = np.array(direction_list) @@ -100,13 +106,12 @@ def point_typization(self) -> dict: return point_type def grid_sort(self) -> dict: - """ - Sorting grid points for each subset from result Points_type.point_typization. - Args: - grid: array of a n-D points. + """ Sorting grid points for each subset from result Points_type.point_typization. + Returns: - sorted grid in each subset (see Points_type.point_typization). + dict: sorted grid in each subset (see Points_type.point_typization). """ + point_type = self.point_typization() point_types = set(point_type.values()) grid_dict = {} @@ -119,16 +124,19 @@ def grid_sort(self) -> dict: grid_dict[p_type] = torch.stack(grid_dict[p_type]) return grid_dict - def bnd_sort(self, grid_dict: dict, b_coord: Union[torch.Tensor, list]): - """ - Sorting boundary points + def bnd_sort(self, grid_dict: dict, b_coord: Union[torch.Tensor, list]) -> list: + """ Sorting boundary points + Args: - grid_dict: array of a n-D points. - b_coord: boundary points of grid. It will be list if periodic condition is. 
+            grid_dict (dict): sorted grid subsets (the result of Points_type.grid_sort).
+            b_coord (Union[torch.Tensor, list]): boundary points of the grid.
+                It will be a list if the condition is periodic.
+
+        Returns:
-            bnd_dict is similar to grid_dict but with b_coord values. It
-            will be list of 'bnd_dict's if 'b_coord' is list too.
+            list: bnd_dict is similar to grid_dict but with b_coord values. It
+                will be a list of 'bnd_dict's if 'b_coord' is a list too.
         """
+
         def bnd_to_dict(grid_dict, b_coord):
             bnd_dict = {}
             for k, v in grid_dict.items():
@@ -142,7 +150,7 @@ def bnd_to_dict(grid_dict, b_coord):
                 bnd_dict[k] = torch.stack(bnd_dict[k])
             return bnd_dict

-        if type(b_coord) == list:
+        if isinstance(b_coord, list):
             bnd_dict_list = [bnd_to_dict(grid_dict, bnd) for bnd in b_coord]
             return bnd_dict_list
         else:
diff --git a/epde/solver/solution.py b/epde/solver/solution.py
index 6e7fa0a..50d2701 100644
--- a/epde/solver/solution.py
+++ b/epde/solver/solution.py
@@ -1,17 +1,17 @@
+"""Module for connecting *eval.py* and *losses.py*."""
+
 from __future__ import annotations
+from copy import deepcopy
+from typing import Tuple, Union, Any, List

 import torch
-import numpy as np
-from copy import copy, deepcopy
-from typing import Tuple, Union

-from epde.solver.points_type import Points_type
 from epde.solver.derivative import Derivative
+from epde.solver.points_type import Points_type
 from epde.solver.eval import Operator, Bounds
 from epde.solver.losses import Losses
 from epde.solver.device import device_type, check_device
 from epde.solver.input_preprocessing import lambda_prepare, Equation_NN, Equation_mat, Equation_autograd
-from epde.solver.utils import *

 flatten_list = lambda t: [item for sublist in t for item in sublist]

@@ -20,115 +20,151 @@ class Solution():
     """
     Class for calculation of the different loss functions.
     """
-    def __init__(self, grid: torch.Tensor, equal_cls: Union[Equation_NN, Equation_mat, Equation_autograd],
-                 model: Union[torch.nn.Sequential, torch.Tensor], mode: str, weak_form: Union[None, list[callable]],
-                 lambda_operator, lambda_bound, tol: float = 0, derivative_points: int = 2):
+    def __init__(
+        self,
+        grid: torch.Tensor,
+        equal_cls: Union[Equation_NN, Equation_mat, Equation_autograd],
+        model: Union[torch.nn.Sequential, torch.Tensor],
+        mode: str,
+        weak_form: Union[None, List[callable]],
+        lambda_operator,
+        lambda_bound,
+        tol: float = 0,
+        derivative_points: int = 2,
+        batch_size: int = None):
+        """
+        Args:
+            grid (torch.Tensor): discretization of the computational domain.
+            equal_cls (Union[Equation_NN, Equation_mat, Equation_autograd]): Equation_{NN, mat, autograd} object.
+            model (Union[torch.nn.Sequential, torch.Tensor]): model of the *mat, NN or autograd* mode.
+            mode (str): *mat, NN or autograd*
+            weak_form (Union[None, List[callable]]): list of basis functions, if the form is *weak*.
+            lambda_operator: regularization parameter for the operator term in the loss.
+            lambda_bound: regularization parameter for the boundary term in the loss.
+            tol (float, optional): penalty in the *causal loss*. Defaults to 0.
+            derivative_points (int, optional): number of points for derivative calculation
+                (for details, see the Derivative_mat class). Defaults to 2.
+            batch_size (int, optional): size of batch. Defaults to None.
+ """ self.grid = check_device(grid) + # print(f'self.grid.get_device {self.grid.get_device()} device_type() {device_type()}') if mode == 'NN': sorted_grid = Points_type(self.grid).grid_sort() self.n_t = len(sorted_grid['central'][:, 0].unique()) + self.n_t_operation = lambda sorted_grid: len(sorted_grid['central'][:, 0].unique()) elif mode == 'autograd': self.n_t = len(self.grid[:, 0].unique()) + self.n_t_operation = lambda grid: len(grid[:, 0].unique()) elif mode == 'mat': self.n_t = grid.shape[1] + self.n_t_operation = lambda grid: grid.shape[1] + equal_copy = deepcopy(equal_cls) prepared_operator = equal_copy.operator_prepare() - self.operator_coeff(equal_cls, prepared_operator) - prepared_bconds = equal_copy.bnd_prepare() + self._operator_coeff(equal_cls, prepared_operator) + self.prepared_bconds = equal_copy.bnd_prepare() self.model = model.to(device_type()) self.mode = mode self.weak_form = weak_form self.lambda_operator = lambda_operator self.lambda_bound = lambda_bound self.tol = tol - + self.derivative_points = derivative_points + self.batch_size = batch_size + if self.batch_size is None: + self.n_t_operation = None + self.operator = Operator(self.grid, prepared_operator, self.model, - self.mode, weak_form, derivative_points) - self.boundary = Bounds(self.grid, prepared_bconds, self.model, + self.mode, weak_form, derivative_points, + self.batch_size) + self.boundary = Bounds(self.grid,self.prepared_bconds, self.model, self.mode, weak_form, derivative_points) - self.loss_cls = Losses(self.mode, self.weak_form, self.n_t, self.tol) - self.eps = 0 + self.loss_cls = Losses(self.mode, self.weak_form, self.n_t, self.tol, + self.n_t_operation) # n_t calculate for each batch self.op_list = [] self.bval_list = [] self.loss_list = [] @staticmethod - def operator_coeff(equal_cls, operator): - for i in range(len(operator)): - eq = operator[i] + def _operator_coeff(equal_cls: Any, operator: list): + """ Coefficient checking in operator. + + Args: + equal_cls (Any): Equation_{NN, mat, autograd} object. + operator (list): prepared operator (result of operator_prepare()) + """ + for i, _ in enumerate(equal_cls.operator): + eq = equal_cls.operator[i] for key in eq.keys(): - if isinstance(eq[key]['coeff'], torch.Tensor): + if isinstance(eq[key]['coeff'], torch.nn.Parameter): + try: + operator[i][key]['coeff'] = eq[key]['coeff'].to(device_type()) + except: + operator[key]['coeff'] = eq[key]['coeff'].to(device_type()) + elif isinstance(eq[key]['coeff'], torch.Tensor): eq[key]['coeff'] = eq[key]['coeff'].to(device_type()) -# try: -# eq[key]['coeff'] = equal_cls.operator[i][key]['coeff'].to(device_type()) -# except: -# eq[key]['coeff'] = equal_cls.operator[key]['coeff'].to(device_type()) + def _model_change(self, new_model: torch.nn.Module) -> None: + """Change self.model for class and *operator, boundary* object. + It should be used in cache_lookup and cache_retrain method. + + Args: + new_model (torch.nn.Module): new self model. + """ + self.model = new_model + self.operator.model = new_model + self.operator.derivative = Derivative(new_model, self.derivative_points).set_strategy( + self.mode).take_derivative + self.boundary.model = new_model + self.boundary.operator = Operator(self.grid, + self.prepared_bconds, + new_model, + self.mode, + self.weak_form, + self.derivative_points, + self.batch_size) def evaluate(self, - second_order_interactions: bool = True, - sampling_N: int = 1, - lambda_update: bool = False, save_graph: bool = True) -> Tuple[torch.Tensor, torch.Tensor]: - """ - Computes loss. 
+ """ Computes loss. Args: - second_order_interactions: optimizer iteration (serves only for computing adaptive lambdas). - sampling_N: parameter for accumulation of solutions (op, bcs). The more sampling_N, the more accurate the estimation of the variance. - lambda_update: update lambda or not. - tol: float constant, influences on error penalty. - save_graph: boolean constant, responsible for saving the computational graph. + second_order_interactions (bool, optional): optimizer iteration + (serves only for computing adaptive lambdas). Defaults to True. + sampling_N (int, optional): parameter for accumulation of + solutions (op, bcs). The more sampling_N, the more accurate the + estimation of the variance (only for computing adaptive lambdas). Defaults to 1. + lambda_update (bool, optional): update lambda or not. Defaults to False. + save_graph (bool, optional): responsible for saving the computational graph. Defaults to True. Returns: - loss + Tuple[torch.Tensor, torch.Tensor]: loss """ - op = self.operator.operator_compute() - bval, true_bval, bval_keys, bval_length = self.boundary.apply_bcs() - - self.lambda_operator = lambda_prepare(op, self.lambda_operator) - self.lambda_bound = lambda_prepare(bval, self.lambda_bound) - - loss, loss_normalized = self.loss_cls.compute(op, bval, true_bval, - self.lambda_operator, - self.lambda_bound, - save_graph) - - if lambda_update: - # TODO refactor this lambda thing to class or function. - bcs = bcs_reshape(bval, true_bval, bval_length) - op_length = [op.shape[0]]*op.shape[-1] - - self.op_list.append(torch.t(op).reshape(-1).cpu().detach().numpy()) - self.bval_list.append(bcs.cpu().detach().numpy()) - self.loss_list.append(float(loss_normalized.item())) - - sampling_amount, sampling_D = samples_count( - second_order_interactions = second_order_interactions, - sampling_N = sampling_N, - op_length=op_length, - bval_length = bval_length) - - if len(self.op_list) == sampling_amount: - self.lambda_operator, self.lambda_bound = Lambda( - self.op_list, self.bval_list, - self.loss_list, - second_order_interactions)\ - .update(op_length=op_length, - bval_length=bval_length, - sampling_D=sampling_D) - self.op_list.clear() - self.bval_list.clear() - self.loss_list.clear() - - oper_keys = [f'eq_{i}' for i in range(len(op_length))] - lambda_print(self.lambda_operator, oper_keys) - lambda_print(self.lambda_bound, bval_keys) - - - return loss, loss_normalized - + self.op = self.operator.operator_compute() + self.bval, self.true_bval,\ + self.bval_keys, self.bval_length = self.boundary.apply_bcs() + dtype = self.op.dtype + self.lambda_operator = lambda_prepare(self.op, self.lambda_operator).to(dtype) + self.lambda_bound = lambda_prepare(self.bval, self.lambda_bound).to(dtype) + + self.loss, self.loss_normalized = self.loss_cls.compute( + self.op, + self.bval, + self.true_bval, + self.lambda_operator, + self.lambda_bound, + save_graph) + if self.batch_size is not None: + if self.operator.current_batch_i == 0: # if first batch in epoch + self.save_op = self.op + else: + self.save_op = torch.cat((self.save_op, self.op), 0) # cat curent losses to previous + self.operator.current_batch_i += 1 + del self.op + torch.cuda.empty_cache() + + return self.loss, self.loss_normalized diff --git a/epde/solver/utils.py b/epde/solver/utils.py index de6985e..d90f34d 100644 --- a/epde/solver/utils.py +++ b/epde/solver/utils.py @@ -1,31 +1,49 @@ -# this one contain some stuff for computing different auxiliary things. 
+"""this one contain some stuff for computing different auxiliary things.""" -import torch -import numpy as np -from typing import Tuple +from typing import Tuple, List, Union, Any from torch.nn import Module -from torch import Tensor -from SALib import ProblemSpec +import datetime +import os +import shutil +import numpy as np +import torch +from epde.solver.device import check_device + +def create_random_fn(eps: float) -> callable: + """ Create random tensors to add some variance to torch neural network. + + Args: + eps (float): randomize parameter. + + Returns: + callable: creating random params function. + """ + def randomize_params(m): + if (isinstance(m, torch.nn.Linear) or isinstance(m, torch.nn.Conv2d)) and m.bias is not None: + m.weight.data = m.weight.data + \ + (2 * torch.randn(m.weight.size()) - 1) * eps + m.bias.data = m.bias.data + (2 * torch.randn(m.bias.size()) - 1) * eps + + return randomize_params + def samples_count(second_order_interactions: bool, sampling_N: int, op_length: list, - bval_length:list) -> Tuple[int, int]: - """ - Count samples for variance based sensitivity analysis. + bval_length: list) -> Tuple[int, int]: + """ Count samples for variance based sensitivity analysis. Args: - second_order_interactions: - sampling_N: essentially determines how often the lambda will be re-evaluated. - op_length: operator values length. - bval_length: boundary value length. + second_order_interactions (bool): Calculate second-order sensitivities. + sampling_N (int): essentially determines how often the lambda will be re-evaluated. + op_length (list): operator values length. + bval_length (list): boundary value length. Returns: - sampling_amount: overall sampling value. - sampling_D: sum of length of grid and boundaries. - - + sampling_amount (int): overall sampling value. + sampling_D (int): sum of length of grid and boundaries. """ + grid_len = sum(op_length) bval_len = sum(bval_length) @@ -37,32 +55,35 @@ def samples_count(second_order_interactions: bool, sampling_amount = sampling_N * (sampling_D + 2) return sampling_amount, sampling_D -def lambda_print(lam, keys) -> None: - """ - Print lambda value. + +def lambda_print(lam: torch.Tensor, keys: List) -> None: + """ Print lambda value. Args: - dict_: dict with lambdas. + lam (torch.Tensor): lambdas values. + keys (List): types of lambdas. """ + lam = lam.reshape(-1) for val, key in zip(lam, keys): print('lambda_{}: {}'.format(key, val.item())) -def bcs_reshape(bval, true_bval, bval_length) \ - -> Tuple[dict, dict, dict, dict]: - """ - Preprocessing for lambda evaluating. + +def bcs_reshape( + bval: torch.Tensor, + true_bval: torch.Tensor, + bval_length: List) -> Tuple[dict, dict, dict, dict]: + """ Preprocessing for lambda evaluating. Args: - op: dict with operator solution. - bval: dict with boundary solution. - true_bval: dict with true boundary solution (i.e. right side of equation). + bval (torch.Tensor): matrix, where each column is predicted + boundary values of one boundary type. + true_bval (torch.Tensor): matrix, where each column is true + boundary values of one boundary type. + bval_length (list): list of length of each boundary type column. Returns: - op: dict with operator solution. - bcs: dict with difference of bval and true_bval. - op_length: dict with lengths of operator solution. - bval_length: dict with lengths of boundary solution. + torch.Tensor: vector of difference between bval and true_bval. 
""" bval_diff = bval - true_bval @@ -73,98 +94,163 @@ def bcs_reshape(bval, true_bval, bval_length) \ return bcs -class Lambda: +def remove_all_files(folder: str) -> None: + """ Remove all files from folder. + + Args: + folder (str): folder name. """ - Serves for computing adaptive lambdas. + for filename in os.listdir(folder): + file_path = os.path.join(folder, filename) + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + elif os.path.isdir(file_path): + shutil.rmtree(file_path) + except Exception as e: + print('Failed to delete %s. Reason: %s' % (file_path, e)) + + +def mat_op_coeff(equation: Any) -> Any: + """ Preparation of coefficients in the operator of the *mat* method + to suit methods *NN, autograd*. + + Args: + operator (dict): operator (equation dict). + + Returns: + operator (dict): operator (equation dict) with suitable coefficients. """ - def __init__(self, op_list: list, - bcs_list: list, - loss_list: list, - sampling_N: int = 1, - second_order_interactions = True): - """ - Args: - op_list: list with operator solution. - bcs_list: list with boundary solution. - loss_list: list with losses. - sampling_N: parameter for accumulation of solutions (op, bcs). The more sampling_N, the more accurate the estimation of the variance. - second_order_interactions: computes second order Sobol indices. - """ - self.second_order_interactions = second_order_interactions - self.op_list = op_list - self.bcs_list = bcs_list - self.loss_list = loss_list - self.sampling_N = sampling_N - - @staticmethod - def lambda_compute(pointer: int, length_list: list, ST: np.ndarray) -> dict: - """ - Computes lambdas. - Args: - pointer: the label to calculate the lambda for the corresponding parameter. - length_dict: dict where values are lengths. - ST: result of SALib.ProblemSpec(). + for op in equation.equation_lst: + for label in list(op.keys()): + term = op[label] + if isinstance(term['coeff'], torch.Tensor): + term['coeff'] = term['coeff'].reshape(-1, 1) + elif callable(term['coeff']): + print("Warning: coefficient is callable,\ + it may lead to wrong cache item choice") + return equation - Returns: - dict with lambdas. - """ - lambdas = [] - for value in length_list: - lambdas.append(sum(ST) / sum(ST[pointer:pointer + value])) - pointer += value - return torch.tensor(lambdas).float().reshape(1, -1) - - def update(self, op_length: list, - bval_length: list, - sampling_D: int) -> Tuple[dict, dict]: - """ - Updates all lambdas (operator and boundary). +def model_mat(model: torch.Tensor, + domain: Any, + cache_model: torch.nn.Module=None) -> Tuple[torch.Tensor, torch.nn.Module]: + """ Create model for *NN or autograd* modes from grid + and model of *mat* mode. - Args: - op_length: dict with lengths of operator solution. - bval_length: dict with lengths of boundary solution. - sampling_D: sum of op_length and bval_length. + Args: + model (torch.Tensor): model from *mat* method. + grid (torch.Tensor): grid from *mat* method. + cache_model (torch.nn.Module, optional): neural network that will + approximate *mat* model. Defaults to None. - Returns: - lambda_operator: values of lambdas for operator. - lambda_bound: values of lambdas for boundary. - """ - op_array = np.array(self.op_list) - bc_array = np.array(self.bcs_list) - loss_array = np.array(self.loss_list) + Returns: + cache_model (torch.nn.Module): model satisfying the *NN, autograd* methods. 
+ """ + grid = domain.build('mat') + input_model = grid.shape[0] + output_model = model.shape[0] + + if cache_model is None: + cache_model = torch.nn.Sequential( + torch.nn.Linear(input_model, 100), + torch.nn.Tanh(), + torch.nn.Linear(100, 100), + torch.nn.Tanh(), + torch.nn.Linear(100, 100), + torch.nn.Tanh(), + torch.nn.Linear(100, output_model) + ) + + return cache_model + + +def save_model_nn( + cache_dir: str, + model: torch.nn.Module, + name: Union[str, None] = None) -> None: + """ + Saves model in a cache (uses for 'NN' and 'autograd' methods). + Args: + cache_dir (str): path to cache folder. + model (torch.nn.Module): model to save. + (uses only with mixed precision and device=cuda). Defaults to None. + name (str, optional): name for a model. Defaults to None. + """ - X_array = np.hstack((op_array, bc_array)) + if name is None: + name = str(datetime.datetime.now().timestamp()) + if not os.path.isdir(cache_dir): + os.mkdir(cache_dir) - bounds = [[-100, 100] for _ in range(sampling_D)] - names = ['x{}'.format(i) for i in range(sampling_D)] + parameters_dict = {'model': model.to('cpu'), + 'model_state_dict': model.state_dict()} - sp = ProblemSpec({'names': names, 'bounds': bounds}) + try: + torch.save(parameters_dict, cache_dir + '\\' + name + '.tar') + print(f'model is saved in cache dir: {cache_dir}') + except RuntimeError: + torch.save(parameters_dict, cache_dir + '\\' + name + '.tar', + _use_new_zipfile_serialization=False) # cyrillic in path + print(f'model is saved in cache: {cache_dir}') + except: + print(f'Cannot save model in cache: {cache_dir}') - sp.set_samples(X_array) - sp.set_results(loss_array) - sp.analyze_sobol(calc_second_order=self.second_order_interactions) - ''' - To assess variance we need total sensitiviy indices for every variable - ''' - ST = sp.analysis['ST'] +def save_model_mat(cache_dir: str, + model: torch.Tensor, + domain: Any, + cache_model: Union[torch.nn.Module, None] = None, + name: Union[str, None] = None) -> None: + """ Saves model in a cache (uses for 'mat' method). - lambda_op = self.lambda_compute(0, op_length, ST) + Args: + cache_dir (str): path to cache folder. + model (torch.Tensor): *mat* model + grid (torch.Tensor): grid from *mat* mode + cache_model (Union[torch.nn.Module, None], optional): model to save. Defaults to None. + name (Union[str, None], optional): name for a model. Defaults to None. + """ - lambda_bnd = self.lambda_compute(sum(op_length), bval_length, ST) + net_autograd = model_mat(model, domain, cache_model) + nn_grid = domain.build('autograd') + optimizer = torch.optim.Adam(net_autograd.parameters(), lr=0.001) + model_res = model.reshape(-1, model.shape[0]) - return lambda_op, lambda_bnd + def closure(): + optimizer.zero_grad() + loss = torch.mean((net_autograd(check_device(nn_grid)) - model_res) ** 2) + loss.backward() + return loss + loss = np.inf + t = 0 + while loss > 1e-5 and t < 1e5: + loss = optimizer.step(closure) + t += 1 + print('Interpolate from trained model t={}, loss={}'.format( + t, loss)) + + save_model_nn(cache_dir, net_autograd, name=name) + +def replace_none_by_zero(tuple_data: tuple) -> torch.Tensor: + """ Make tensor from tuple (or None element) ad replace None elements to zero. + + Args: + tuple_data (tuple): path to cache folder. 
+ """ + if isinstance(tuple_data, torch.Tensor): + tuple_data[tuple_data == None] = 0 + elif tuple_data is None: + tuple_data = torch.tensor([0.]) + elif isinstance(tuple_data, tuple): + new_tuple = tuple(replace_none_by_zero(item) for item in tuple_data) + return new_tuple + return tuple_data class PadTransform(Module): """Pad tensor to a fixed length with given padding value. - - :param max_length: Maximum length to pad to - :type max_length: int - :param pad_value: Value to pad the tensor with - :type pad_value: bool src: https://pytorch.org/text/stable/transforms.html#torchtext.transforms.PadTransform @@ -172,19 +258,28 @@ class PadTransform(Module): """ def __init__(self, max_length: int, pad_value: int) -> None: + """_summary_ + + Args: + max_length (int): Maximum length to pad to. + pad_value (int): Value to pad the tensor with. + """ super().__init__() self.max_length = max_length self.pad_value = float(pad_value) - def forward(self, x: Tensor) -> Tensor: - """ - :param x: The tensor to pad - :type x: Tensor - :return: Tensor padded up to max_length with pad_value - :rtype: Tensor + def forward(self, x: torch.Tensor) -> torch.Tensor: + """ Tensor padding + + Args: + x (torch.Tensor): tensor for padding. + + Returns: + torch.Tensor: filled tensor with pad value. """ + max_encoded_length = x.size(-1) if max_encoded_length < self.max_length: pad_amount = self.max_length - max_encoded_length x = torch.nn.functional.pad(x, (0, pad_amount), value=self.pad_value) - return x \ No newline at end of file + return x diff --git a/epde/solver/version.py b/epde/solver/version.py new file mode 100644 index 0000000..68eb9b6 --- /dev/null +++ b/epde/solver/version.py @@ -0,0 +1 @@ +__version__ = '0.4.5' diff --git a/epde/structure/factor.py b/epde/structure/factor.py index cb82b2d..1b90232 100644 --- a/epde/structure/factor.py +++ b/epde/structure/factor.py @@ -10,24 +10,64 @@ import copy import torch from typing import Callable -from collections import Iterable +try: + from collections.abc import Iterable +except ImportError: + from collections import Iterable import epde.globals as global_var from epde.structure.Tokens import TerminalToken from epde.supplementary import factor_params_to_str, train_ann, use_ann_to_predict, exp_form +from epde.evaluators import simple_function_evaluator + +class EvaluatorContained(object): + """ + Class for evaluator of token (factor of the term in the sought equation) values with arbitrary function + + Attributes: + _evaluator (`callable`): a function, which returns the vector of token values, evaluated on the studied area; + params (`dict`): dictionary, containing parameters of the evaluator (like grid, on which the function is evaluated or matrices of pre-calculated function) + + Methods: + set_params(**params) + set the parameters of the evaluator, using keyword arguments + apply(token, token_params) + apply the defined evaluator to evaluate the token with specific parameters + """ + + def __init__(self, eval_function): # , eval_kwargs_keys={} + self._evaluator = eval_function + # self.eval_kwargs_keys = eval_kwargs_keys + + def apply(self, token, structural=False, func_args=None, torch_mode=False): # , **kwargs + """ + Apply the defined evaluator to evaluate the token with specific parameters. + + Args: + token (`epde.main_structures.factor.Factor`): symbolic label of the specific token, e.g. 
'cos'; + token_params (`dict`): dictionary with keys, naming the token parameters (such as frequency, axis and power for trigonometric function) + and values - specific values of corresponding parameters. + + Raises: + `TypeError` + If the evaluator could not be applied to the token. + """ + # assert list(kwargs.keys()) == self.eval_kwargs_keys, f'Kwargs {kwargs.keys()} != {self.eval_kwargs_keys}' + return self._evaluator(token, structural, func_args, torch_mode = torch_mode) class Factor(TerminalToken): - __slots__ = ['_params', '_params_description', '_hash_val', '_latex_constructor', - 'label', 'ftype', '_variable', 'grid_set', 'grid_idx', 'is_deriv', 'deriv_code', + __slots__ = ['_params', '_params_description', '_hash_val', '_latex_constructor', 'label', + 'ftype', '_variable', '_all_vars', 'grid_set', 'grid_idx', 'is_deriv', 'deriv_code', 'cache_linked', '_status', 'equality_ranges', '_evaluator', 'saved'] def __init__(self, token_name: str, status: dict, family_type: str, latex_constructor: Callable, - variable: str = None, randomize: bool = False, params_description=None, deriv_code=None, - equality_ranges = None): + variable: str = None, all_vars: list = None, randomize: bool = False, + params_description=None, deriv_code=None, equality_ranges = None): self.label = token_name self.ftype = family_type self._variable = variable + self._all_vars = all_vars self.status = status self.grid_set = False @@ -112,7 +152,10 @@ def set_parameters(self, params_description: dict, equality_ranges: dict, _params_description = {} if not random: _params = np.empty(len(kwargs)) - assert len(kwargs) == len(params_description), 'Not all parameters have been declared. Partial randomization TBD' + if len(kwargs) != len(params_description): + print('Not all parameters have been declared. Partial randomization TBD') + print(f'kwargs {kwargs}, while params_descr {params_description}') + raise ValueError('...') for param_idx, param_info in enumerate(kwargs.items()): _params[param_idx] = param_info[1] _params_description[param_idx] = {'name': param_info[0], @@ -166,35 +209,63 @@ def evaluator(self): @evaluator.setter def evaluator(self, evaluator): - try: - factor_family = [family for family in evaluator if family.ftype == self.ftype][0] - self._evaluator = factor_family.evaluator - except TypeError: + if isinstance(evaluator, EvaluatorContained): self._evaluator = evaluator + else: + factor_family = [family for family in evaluator.families if family.ftype == self.ftype][0] + self._evaluator = factor_family._evaluator # TODO: fix calling private attribute - # Переработать/удалить __call__, т.к. его функции уже тут - def evaluate(self, structural=False, grids=None): - assert self.cache_linked + def evaluate(self, structural=False, grids=None, torch_mode: bool = False): + assert self.cache_linked, 'Missing linked cache.' 
if self.is_deriv and grids is not None: raise Exception( 'Derivatives have to evaluated on the initial grid') key = 'structural' if structural else 'base' - if self.saved[key] and grids is None: + if (self.cache_label, structural) in global_var.tensor_cache and grids is None: + # print(f'Asking for {self.cache_label} in tmode {torch_mode}') + # print(f'From numpy cache of {global_var.tensor_cache.memory_structural["numpy"].keys()}') + # print(f'And torch cache of {global_var.tensor_cache.memory_structural["torch"].keys()}') + return global_var.tensor_cache.get(self.cache_label, - structural=structural) + structural=structural, torch_mode = torch_mode) + else: - value = self._evaluator.apply(self, structural=structural, grids=grids) + if self.is_deriv and self.evaluator._evaluator != simple_function_evaluator: + if grids is not None: + raise Exception('Data-reliant tokens shall not get grids as arguments for evaluation.') + if isinstance(self.variable, str): + var = self._all_vars.index(self.variable) + func_arg = [global_var.tensor_cache.get(label=None, torch_mode=torch_mode, + deriv_code=(var, self.deriv_code)),] + elif isinstance(self.variable, (list, tuple)): + func_arg = [] + for var_idx, code in enumerate(self.deriv_code): + assert len(self.variable) == len(self.deriv_code) + func_arg.append(global_var.tensor_cache.get(label=None, torch_mode=torch_mode, + deriv_code=(self.variable[var_idx], code))) + + value = self.evaluator.apply(self, structural=structural, func_args=func_arg, torch_mode=torch_mode) + else: + value = self.evaluator.apply(self, structural=structural, func_args=grids, torch_mode=torch_mode) if grids is None: + if self.is_deriv and self.evaluator._evaluator == simple_function_evaluator: + full_deriv_code = (self._all_vars.index(self.variable), self.deriv_code) + else: + full_deriv_code = None + if key == 'structural' and self.status['structural_and_defalut_merged']: - global_var.tensor_cache.use_structural(use_base_data=True) + self.saved[key] = global_var.tensor_cache.add(self.cache_label, value, structural=False, + deriv_code=full_deriv_code) + global_var.tensor_cache.use_structural(use_base_data=True, + label=self.cache_label) elif key == 'structural' and not self.status['structural_and_defalut_merged']: global_var.tensor_cache.use_structural(use_base_data=False, label=self.cache_label, replacing_data=value) else: - self.saved[key] = global_var.tensor_cache.add(self.cache_label, value, - structural=False) + self.saved[key] = global_var.tensor_cache.add(self.cache_label, value, structural=False, + deriv_code=full_deriv_code) return value @property diff --git a/epde/structure/main_structures.py b/epde/structure/main_structures.py index c5ac443..9e0dd43 100644 --- a/epde/structure/main_structures.py +++ b/epde/structure/main_structures.py @@ -11,9 +11,12 @@ import copy import os import pickle -from typing import Union, Callable +from typing import Union, Callable, List from functools import singledispatchmethod, reduce -from collections import Iterable +try: + from collections.abc import Iterable +except ImportError: + from collections import Iterable import numpy as np @@ -22,12 +25,15 @@ import epde.globals as global_var import epde.optimizers.moeadd.solution_template as moeadd -from epde.structure.encoding import Chromosome +from epde.decorators import HistoryExtender, BoundaryExclusion +from epde.evaluators import simple_function_evaluator from epde.interface.token_family import TFPool -from epde.decorators import HistoryExtender, ResetEquationStatus -from 
epde.supplementary import filter_powers, normalize_ts, population_sort, flatten, rts, exp_form +from epde.preprocessing.domain_pruning import DomainPruner + +from epde.structure.encoding import Chromosome from epde.structure.factor import Factor from epde.structure.structure_template import ComplexStructure, check_uniqueness +from epde.supplementary import filter_powers, normalize_ts, population_sort, flatten, rts, exp_form class Term(ComplexStructure): @@ -48,7 +54,6 @@ class Term(ComplexStructure): __slots__ = ['_history', 'structure', 'interelement_operator', 'saved', 'saved_as', 'pool', 'max_factors_in_term', 'cache_linked', 'occupied_tokens_labels', '_descr_variable_marker'] - # manual_reconst_attrs = ['structure'] def __init__(self, pool, passed_term=None, mandatory_family=None, max_factors_in_term=1, create_derivs: bool = False, interelement_operator=np.multiply, collapse_powers = True): @@ -80,6 +85,7 @@ def manual_reconst(self, attribute:str, value, except_attrs:dict): factor = Factor.__new__(Factor) attrs_from_dict(factor, factor_elem, except_attrs) + factor.evaluator = self.pool self.structure.append(factor) @property @@ -296,9 +302,13 @@ def latex_form(self): def contains_deriv(self, variable=None): if variable is None: - return any([factor.is_deriv and factor.deriv_code != [None,] for factor in self.structure]) + return any([factor.is_deriv and factor.deriv_code != [None,] and + factor.evaluator._evaluator == simple_function_evaluator + for factor in self.structure]) else: - return any([factor.variable == variable and factor.deriv_code != [None,] for factor in self.structure]) + return any([factor.variable == variable and factor.deriv_code != [None,] and + factor.evaluator._evaluator == simple_function_evaluator + for factor in self.structure]) def contains_variable(self, variable): return any([factor.variable == variable for factor in self.structure]) @@ -553,15 +563,16 @@ def shifted_idx(idx): value = np.add(elem1, - reduce(lambda x, y: np.add(x, y), [np.multiply(self.weights_internal[idx_full], temp_feats[:, idx_sparse]) for idx_sparse, idx_full in enumerate(feature_indexes)])) # for feature_idx, weight in np.ndenumerate(self.weights_internal)])) - else: + else: elem1 = np.expand_dims(target, axis=1) - if features is None: - feature_list = [np.multiply(self.weights_final[idx_full], temp_feats[:, idx_sparse]) - for idx_sparse, idx_full in enumerate(feature_indexes)] + if features is not None: + features_val = reduce(lambda x, y: np.add(x, y), [np.multiply(self.weights_final[idx_full], temp_feats[:, idx_sparse]) + for idx_sparse, idx_full in enumerate(feature_indexes)]) # Possible mistake here + features_val = np.expand_dims(features_val, axis=1) else: - feature_list = 0 - value = np.add(elem1, feature_list) - + features_val = np.zeros_like(target) + value = np.add(elem1, - features_val) + # print(value.shape) return value, target, features else: return None, target, features @@ -784,6 +795,7 @@ def clear_after_solver(self): def solver_formed_grid(training_grid=None): + raise NotImplementedError('solver_formed_grid function is to be depricated') if training_grid is None: keys, training_grid = global_var.grid_cache.get_all() else: @@ -878,8 +890,12 @@ def matches_complexitiy(self, complexity : Union[int, list]): if not isinstance(complexity, list) or len(self.vars_to_describe) != len(complexity): raise ValueError('Incorrect list of complexities passed.') + adj_complexity = copy.copy(complexity) + for idx, compl in enumerate(adj_complexity): + if compl is None: + 
adj_complexity[idx] = self.obj_fun[-len(complexity) + idx] - return list(self.obj_fun[-len(complexity):]) == complexity + return list(self.obj_fun[-len(adj_complexity):]) == adj_complexity def create(self, passed_equations: list = None): if passed_equations is None: @@ -1005,13 +1021,6 @@ def __iter__(self): def fitness_calculated(self): return all([equation.fitness_calculated for equation in self.vals]) - # def save(self, file_name='epde_systems.pickle'): - # directory = os.getcwd() - # with open(file_name, 'wb') as file: - # to_save = ([equation.text_form for equation in self.vals], - # self.tokens_for_eq + self.tokens_supp) - # pickle.dump(obj=to_save, file=file) - class SoEqIterator(object): def __init__(self, system: SoEq): @@ -1025,4 +1034,4 @@ def __next__(self): self._idx += 1 return res else: - raise StopIteration + raise StopIteration \ No newline at end of file diff --git a/epde/structure/structure_template.py b/epde/structure/structure_template.py index e9c6829..fd49f6e 100644 --- a/epde/structure/structure_template.py +++ b/epde/structure/structure_template.py @@ -8,7 +8,10 @@ import numpy as np from functools import reduce -from collections import Iterable +try: + from collections.abc import Iterable +except ImportError: + from collections import Iterable def check_uniqueness(obj, background): return not any([elem == obj for elem in background]) @@ -48,8 +51,12 @@ def evaluate(self, structural=False): if len(self.structure) == 1: return self.structure[0].evaluate(structural) else: - return reduce(lambda x, y: self.interelement_operator(x, y.evaluate(structural)), - self.structure[1:], self.structure[0].evaluate(structural)) + try: + return reduce(lambda x, y: self.interelement_operator(x, y.evaluate(structural)), + self.structure[1:], self.structure[0].evaluate(structural)) + except ValueError: + print([element.name for element in self.structure]) + raise ValueError('operands could not be broadcast together with shapes') def reset_saved_state(self): self.saved = {True: False, False: False} diff --git a/epde/supplementary.py b/epde/supplementary.py index f03ce94..847aefd 100644 --- a/epde/supplementary.py +++ b/epde/supplementary.py @@ -6,20 +6,110 @@ @author: mike_ubuntu """ +from abc import ABC +from typing import Callable, Union + import numpy as np from functools import reduce import copy import torch -device = torch.device('cpu') +# device = torch.device('cpu') import matplotlib.pyplot as plt -import epde.globals as global_var + +from epde.solver.data import Domain +from epde.solver.models import Fourier_embedding, mat_model + + +class BasicDeriv(ABC): + def __init__(self, *args, **kwargs): + raise NotImplementedError('Trying to create abstract differentiation method') + + def take_derivative(self, u: torch.Tensor, args: torch.Tensor, axes: list): + raise NotImplementedError('Trying to differentiate with abstract differentiation method') + + +class AutogradDeriv(BasicDeriv): + def __init__(self): + pass + + def take_derivative(self, u: Union[torch.nn.Sequential, torch.Tensor], args: torch.Tensor, + axes: list = [], component: int = 0): + if not args.requires_grad: + args.requires_grad = True + if axes == [None,]: + return u(args)[..., component].reshape(-1, 1) + if isinstance(u, torch.nn.Sequential): + comp_sum = u(args)[..., component].sum(dim = 0) + elif isinstance(u, torch.Tensor): + raise TypeError('Autograd shall have torch.nn.Sequential as its inputs.') + else: + print(f'u.shape, {u.shape}') + comp_sum = u.sum(dim = 0) + for axis in axes: + output_vals = 
torch.autograd.grad(outputs = comp_sum, inputs = args, create_graph=True)[0] + comp_sum = output_vals[:, axis].sum() + output_vals = output_vals[:, axes[-1]].reshape(-1, 1) + return output_vals + +class FDDeriv(BasicDeriv): + def __init__(self): + pass + + def take_derivative(self, u: np.ndarray, args: np.ndarray, + axes: list = [], component: int = 0): + + if not isinstance(args, torch.Tensor): + args = args.detach().cpu().numpy() + + output_vals = u[..., component].reshape(args.shape) + if axes == [None,]: + return output_vals + for axis in axes: + output_vals = np.gradient(output_vals, args.reshape(-1)[1] - args.reshape(-1)[0], axis = axis, edge_order=2) + return output_vals + +def create_solution_net(equations_num: int, domain_dim: int, use_fourier = True, # mode: str, domain: Domain + fourier_params: dict = None, device = 'cpu'): + ''' + fft_params have to be passed as dict with entries like: {'L' : [4,], 'M' : [3,]} + ''' + L_default, M_default = 4, 10 + if use_fourier: + if fourier_params is None: + if domain_dim == 1: + fourier_params = {'L' : [L_default], + 'M' : [M_default]} + else: + fourier_params = {'L' : [L_default] + [None,] * (domain_dim - 1), + 'M' : [M_default] + [None,] * (domain_dim - 1)} + fourier_params['device'] = device + four_emb = Fourier_embedding(**fourier_params) + if device == 'cuda': + four_emb = four_emb.cuda() + net_default = [four_emb,] + else: + net_default = [] + linear_inputs = net_default[0].out_features if use_fourier else domain_dim + + if domain_dim == 1: + hidden_neurons = 128 # 64 # + else: + hidden_neurons = 112 # 54 # + + operators = net_default + [torch.nn.Linear(linear_inputs, hidden_neurons, device=device), + torch.nn.Tanh(), + torch.nn.Linear(hidden_neurons, hidden_neurons, device=device), + torch.nn.Tanh(), + torch.nn.Linear(hidden_neurons, equations_num, device=device)] + return torch.nn.Sequential(*operators) def exp_form(a, sign_num: int = 4): if np.isclose(a, 0): return 0.0, 0 exp = np.floor(np.log10(np.abs(a))) - return np.around(a / 10**exp, sign_num), int(exp) # np.sign(a) * + return np.around(a / 10**exp, sign_num), int(exp) + def rts(value, sign_num: int = 5): """ @@ -33,40 +123,36 @@ def rts(value, sign_num: int = 5): idx -= 1 return np.around(value, int(idx)) -def train_ann(grids: list, data: np.ndarray, epochs_max: int = 500): - dim = 1 if np.any([s == 1 for s in data.shape]) and data.ndim == 2 else data.ndim - assert len(grids) == dim, 'Dimensionality of data does not match with passed grids.' - data_size = data.size - model = torch.nn.Sequential( - torch.nn.Linear(dim, 256), - torch.nn.Tanh(), - # torch.nn.Dropout(0.1), - # torch.nn.ReLU(), - torch.nn.Linear(256, 256), - torch.nn.Tanh(), - # torch.nn.Dropout(0.1), - # torch.nn.ReLU(), - torch.nn.Linear(256, 64), - # # torch.nn.Dropout(0.1), - torch.nn.Tanh(), - torch.nn.Linear(64, 1024), - # torch.nn.Dropout(0.1), - torch.nn.Tanh(), - torch.nn.Linear(1024, 1) - # torch.nn.Tanh() - ) - - data_grid = np.stack([grid.reshape(-1) for grid in grids]) - grid_tensor = torch.from_numpy(data_grid).float().T - grid_tensor.to(device) - data = torch.from_numpy(data.reshape(-1, 1)).float() - print(data.size) - data.to(device) +def train_ann(args: list, data: np.ndarray, epochs_max: int = 500, batch_frac = 0.5, + dim = None, model = None, device = 'cpu'): + if dim is None: + dim = 1 if np.any([s == 1 for s in data.shape]) and data.ndim == 2 else data.ndim + # assert len(args) == dim, 'Dimensionality of data does not match with passed grids.' 
+ data_size = data.size + if model is None: + model = torch.nn.Sequential( + torch.nn.Linear(dim, 256, device=device), + torch.nn.Tanh(), + torch.nn.Linear(256, 256, device=device), + torch.nn.Tanh(), + torch.nn.Linear(256, 64, device=device), + torch.nn.Tanh(), + torch.nn.Linear(64, 1024, device=device), + torch.nn.Tanh(), + torch.nn.Linear(1024, 1, device=device) + ) + + model.to(device) + data_grid = np.stack([arg.reshape(-1) for arg in args]) + grid_tensor = torch.from_numpy(data_grid).float().T.to(device) + # grid_tensor.to(device) + data = torch.from_numpy(data.reshape(-1, 1)).float().to(device) + # print(data.size) + # data.to(device) optimizer = torch.optim.Adam(model.parameters(), lr=0.0001) - batch_frac = 0.5 - batch_size = int(data_size * batch_frac) # or whatever + batch_size = int(data_size * batch_frac) t = 0 @@ -75,7 +161,7 @@ def train_ann(grids: list, data: np.ndarray, epochs_max: int = 500): loss_mean = 1000 min_loss = np.inf losses = [] - while loss_mean > 2e-3 and t < epochs_max: # and t 2e-3 and t < epochs_max: permutation = torch.randperm(grid_tensor.size()[0]) @@ -96,8 +182,8 @@ def train_ann(grids: list, data: np.ndarray, epochs_max: int = 500): best_model = model min_loss = loss_mean losses.append(loss_mean) - if global_var.verbose.show_ann_loss: - print('Surface training t={}, loss={}'.format(t, loss_mean)) + # if global_var.verbose.show_ann_loss: + # print('Surface training t={}, loss={}'.format(t, loss_mean)) t += 1 print_loss = True if print_loss: @@ -106,26 +192,13 @@ def train_ann(grids: list, data: np.ndarray, epochs_max: int = 500): plt.show() return best_model - def use_ann_to_predict(model, recalc_grids: list): data_grid = np.stack([grid.reshape(-1) for grid in recalc_grids]) recalc_grid_tensor = torch.from_numpy(data_grid).float().T - recalc_grid_tensor.to(device) + recalc_grid_tensor = recalc_grid_tensor #.to(device) return model(recalc_grid_tensor).detach().numpy().reshape(recalc_grids[0].shape) - - -def np_cartesian_product(*arrays): - print(arrays) - la = len(arrays) - dtype = np.result_type(*arrays) - arr = np.empty([len(a) for a in arrays] + [la], dtype=dtype) - for i, a in enumerate(np.ix_(*arrays)): - arr[..., i] = a - return arr.reshape(-1, la) - - def flatten(obj): ''' Method to flatten list, passed as ``obj`` - the function parameter. @@ -137,39 +210,16 @@ def flatten(obj): obj[idx] = [elem,] return reduce(lambda x, y: x+y, obj) - - -def try_iterable(arg): - try: - _ = [elem for elem in arg] - except TypeError: - return False - return True - - - -def memory_assesment(): - try: - h = hpy() - except NameError: - from guppy import hpy - h = hpy() - print(h.heap()) - del h - - def factor_params_to_str(factor, set_default_power=False, power_idx=0): param_label = np.copy(factor.params) if set_default_power: param_label[power_idx] = 1. return (factor.label, tuple(param_label)) - def form_label(x, y): print(type(x), type(y.cache_label)) return x + ' * ' + y.cache_label if len(x) > 0 else x + y.cache_label - def detect_similar_terms(base_equation_1, base_equation_2): # Переделать! 
same_terms_from_eq1 = [] same_terms_from_eq2 = [] @@ -211,7 +261,7 @@ def detect_similar_terms(base_equation_1, base_equation_2): # Передела return [same_terms_from_eq1, similar_terms_from_eq1, different_terms_from_eq1], [same_terms_from_eq2, similar_terms_from_eq2, different_terms_from_eq2] -def filter_powers(gene): # Разобраться и переделать +def filter_powers(gene): gene_filtered = [] for token_idx in range(len(gene)): @@ -230,30 +280,6 @@ def filter_powers(gene): # Разобраться и переделать gene_filtered.append(powered_token) return gene_filtered -def Bind_Params(zipped_params): - param_dict = {} - for token_props in zipped_params: - param_dict[token_props[0]] = token_props[1] - return param_dict - - - -def Slice_Data_3D(matrix, part=4, part_tuple=None): - """ - Input matrix slicing for separate domain calculation - """ - - if part_tuple: - for i in range(part_tuple[0]): - for j in range(part_tuple[1]): - yield matrix[:, i*int(matrix.shape[1]/float(part_tuple[0])):(i+1)*int(matrix.shape[1]/float(part_tuple[0])), - j*int(matrix.shape[2]/float(part_tuple[1])):(j+1)*int(matrix.shape[2]/float(part_tuple[1]))], i, j - part_dim = int(np.sqrt(part)) - for i in range(part_dim): - for j in range(part_dim): - yield matrix[:, i*int(matrix.shape[1]/float(part_dim)):(i+1)*int(matrix.shape[1]/float(part_dim)), - j*int(matrix.shape[2]/float(part_dim)):(j+1)*int(matrix.shape[2]/float(part_dim))], i, j - def define_derivatives(var_name='u', dimensionality=1, max_order=2): """ @@ -268,8 +294,8 @@ def define_derivatives(var_name='u', dimensionality=1, max_order=2): deriv_names (`list` with `str` values): keys for epde var_deriv_orders (`list` with `int` values): keys for enter to solver """ - deriv_names = []#[var_name,] - var_deriv_orders = []#[[None,],] + deriv_names = [] + var_deriv_orders = [] if isinstance(max_order, int): max_order = [max_order for dim in range(dimensionality)] for var_idx in range(dimensionality): @@ -279,7 +305,6 @@ def define_derivatives(var_name='u', dimensionality=1, max_order=2): deriv_names.append('d' + var_name + '/dx' + str(var_idx)) else: deriv_names.append( - 'd^'+str(order+1) + var_name + '/dx'+str(var_idx)+'^'+str(order+1)) print('Deriv orders after definition', var_deriv_orders) return deriv_names, var_deriv_orders @@ -309,4 +334,3 @@ def normalize_ts(Input): else: matrix[i] = 1 return matrix - diff --git a/examples/ODE discovery.ipynb b/examples/ODE discovery.ipynb index 20199da..50dc41e 100644 --- a/examples/ODE discovery.ipynb +++ b/examples/ODE discovery.ipynb @@ -13,7 +13,7 @@ { "cell_type": "code", "execution_count": 1, - "id": "1b9842d8", + "id": "0539da96", "metadata": {}, "outputs": [], "source": [ @@ -28,7 +28,7 @@ }, { "cell_type": "markdown", - "id": "141bc1de", + "id": "5bd8603c", "metadata": {}, "source": [ "ADD SOME LINK TO ARTICLE ON EPDE PRINCIPLES" @@ -37,30 +37,37 @@ { "cell_type": "code", "execution_count": 2, - "id": "0539da96", + "id": "93ae0a4c", "metadata": {}, "outputs": [], "source": [ "import sys\n", - "sys.path.append('..')\n", - "import helpers" + "sys.path.append('..')" ] }, { "cell_type": "code", "execution_count": 3, + "id": "f6666146", + "metadata": {}, + "outputs": [], + "source": [ + "import epde" + ] + }, + { + "cell_type": "code", + "execution_count": 4, "id": "19ff1073", "metadata": {}, "outputs": [], "source": [ - "import epde\n", - "\n", "mpl.rcParams.update(mpl.rcParamsDefault)\n", - "plt.rcParams['text.usetex'] = True\n", + "plt.rcParams['text.usetex'] = False\n", "\n", "SMALL_SIZE = 12\n", "mpl.rc('font', size=SMALL_SIZE)\n", 
- "mpl.rc('axes', titlesize=SMALL_SIZE)\n" + "mpl.rc('axes', titlesize=SMALL_SIZE)" ] }, { @@ -79,41 +86,37 @@ "Principles of equation discovery can be illustrated by a simple example of reconstruction of first-order equation:\n", "\n", "\\begin{equation}\n", - "\\label{eq:ODE1}\n", "x \\sin{t} + \\frac{d x}{d t} \\cos{t} = 1,\n", "\\end{equation}\n", "\n", "where the general solution for an arbitrary constant $C$ is:\n", "\n", "\\begin{equation}\n", - "\\label{eq:ODE1_solution}\n", "x = \\sin{t} + C \\cos{t}.\n", "\\end{equation}\n", "\n", "To generate the data we will use an analytical particular solution, matching initial condition of $x(0) = 1.3$ (thus, $C = 1.3$) on the interval of $(0, 4 \\pi)$:\n", "\n", "\\begin{equation}\n", - "\\label{eq:ODE1_part_solution}\n", "x = \\sin{t} + 1.3 \\cos{t}.\n", "\\end{equation}\n", "\n", "Furthermore, we will provide an example of using a priori known derivatvies, that can be easily calculated by differentiating the solution:\n", "\n", "\\begin{equation}\n", - "\\label{eq:ODE1_deriv_solution}\n", "x' = \\cos{t} - 1.3 \\sin{t}.\n", "\\end{equation}" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "id": "a517dd15", "metadata": {}, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkMAAAGyCAYAAADnH8C6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAACGEElEQVR4nO2deXgc1ZX2327ti6W2vNuSF8k2XmW7JcsrZrHMkgAOIAGBSQZCkAlJyARmJJzMJJPJzDh2MnwDJBCLJCSQkNgSDBAIOJYxNsabFlve5E3tRd6tpbVZa/f9/rh9uyVbu6r6dt06v+fpp0vq7qqjUtet955z7jkWxhgDQRAEQRCESbHKNoAgCIIgCEImJIYIgiAIgjA1JIYIgiAIgjA1JIYIgiAIgjA1JIYIgiAIgjA1JIYIgiAIgjA1JIYIgiAIgjA1JIYIgiAIgjA1wbINMAJutxsXLlzAkCFDYLFYZJtDEARBEEQfYIyhvr4eY8eOhdXavf+HxFAfuHDhAhISEmSbQRAEQRDEAKioqEB8fHy3r5MY6gNDhgwBwE9mTEyMZGsIgiAIgugLdXV1SEhI8N7Hu4PEUB8QobGYmBgSQwRBEARhMHpLcaEEaoIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA0lUBMEQRCETrhcLrS1tck2QzlCQkIQFBSk2f5IDBEEQRCExjDGcOnSJTidTtmmKIvNZsPo0aM1KYZMYoggCIIgNEYIoZEjRyIyMpK6F2gIYwzXrl3DlStXAABjxowZ9D5JDBEEQRCEhrhcLq8QGjZsmGxzlCQiIgIAcOXKFYwcOXLQITNKoCYIgiAIDRE5QpGRkZItURtxfrXIySIxRBAEQRA6QKExfdHy/JIYIgiCIAjC1JAYIgiCIAjC1FACtWI0Nzdj165d+Oyzz1BdXY2IiAiMHj0a9913HyZPnizbPIIA2tqATz8FDh8GTpwAwsKAqVMBux1YsABQILRw9OhRfPrppzh79iwqKyuRmJiIOXPm4Oabb6Zmz0Rg0NoK1NYCzc38Z4sFiI4GhgwBNKjfU1JSgsTERNhstm7fU1BQgPT09EEfSwtIDClCQ0MDfv7zn+PFF19EQ0PDDa8///zzmDdvHn7yk5/g3nvvlWAhYXrq6oBf/xp4+WXg/Pmu3zN3LvD97wOPPabJgOxvPvjgA7z44ovYtm1bl6/HxMTgO9/5Dv7pn/4JI0aM8LN1BAGgsRGoqAC6uE8A4KJo2DBg3DggJGRAhygoKMD69euRl5fX4/scDgdWrVqF9evXD+g4WmJhjDHZRgQ6dXV1iI2NRW1tbUDO6j744AOsWrUKly5dAgCMHj0ay5cvx8SJE9Hc3IzS0lJs3boVLpcLAHD//ffjlVdewbhx42SaTZiJvXuBhx8GTp/mP48aBSxbxj1Czc3A8eNAQQHQ1MRfv+UW4E9/4gOyAXA6nfj2t7+Nt99+GwBgtVqRnp6Om266CXFxcThx4gR27dqFU6dOAeDF4t58802amChKc3MzTp06hUmTJiE8PFy2ORyXi4ugykrf76KiuDfIYuGv19ZyjxHAJyPjxgEjRvTLW+twOJCSkoJTp0716BUSpKSkYNWqVcjKyurnH9S389zn+zcjeqW2tpYBYLW1tbJN6YTb7Wbr1q1jFouFAWBJSUksPz+fud3uG95bWVnJcnJyWHBwMAPAxo4dyw4dOiTBasJ0vPwyY8HBjAGMTZjA2BtvMNbcfOP7KisZ++//Ziw6mr932DDGNm/2t7X95tChQ2z8+PEMAAsKCmL//M//zM6ePXvD+1wuF3v33XfZnDlzGAAGgL3wwgusvb1dgtWEnjQ1NbEjR46wpqYm2aZwWloYO3yYscJC/nA4+O+ux+1mrK6u83tPn+a/7yNZWVksOzu70+82b97MEhMTu3x/T6/1Rl/Oc1/v3ySG+kAgiiG3282eeeYZ76D6zDPPsJauvtzXceDAATZjxgwGgNlsNrZjxw4/WEuYlp//nAsbgLGMDMZqanr/zPHjjNnt/DNhYQEtiA4ePMhGjBjBALDExES2c+fOXj/T0tLCnn32We+1++STT3Y5gSGMS3c3abfbzRoaGvz7uHqVNezaxRq2b2cNO3eyhosXva91+71zuxm7eNEniE6cYMzl6vXvrqmpYQBYcXFxp9/3JngSExNZXl5e7yf2OkgM+ZlAFEM//vGPGQBmtVrZyy+/3K/PVlVVsUWLFjEALCoqipWWlupkJWFqXnrJJ4R+8pN+zS5ZczNjK1fyz0ZEMLZ1q15WDph
Dhw6x4cOHMwDMbrezqqqqfn3+rbfeYlarlQFg3//+90kQKUR3N+mGhgavCA6ER0NDQ89/SHU1Y0VFXBCVl7Psf/mXG0RNVlYWs9vtjDEuemw2W6fXMzIybjhuzXWTooyMDJaVlaXZee4IiSENCTQx9Jvf/Mb7pVq/fv2A9tHY2Mhuu+02BoCNHz+eXb58WWMrCVPz7rs+IfSv/zqwfTQ3M/alL/F9DBnCPUYBQnV1NUtMTGQAWEpKSr+FkOB3v/ud91peu3atxlYSslBGDDHGWG2tTxCdO8fS09O9wkWIHyFusrOzvcKoI3l5eT16htavXz+gUJmWYohWkxmM3bt3Y9WqVQCAH/zgBwNKOgN4GfP8/HwsWLAAJ0+exP33349PP/0UYWFhWppLmJHycuCJJ/j2d78L/Md/DGw/YWHAO+8A6enAF18AGRnA7t2ApyeRLNxuN772ta/B4XBg4sSJ2LRpE+Li4ga0ryeeeAK1tbX4/ve/jx/84AdYvHgxli5dqrHFRKAQGRnZ5WpfzWEMOHUKcDqB4GC+UKGLBOM+tQuJiQEmTOCLHy5eRN769ZiUkoKkpCSsWbMGeXl53kRph8OBxMTEfpsbFxeH6urqfn9OU/otxUxIoHiG6uvrWVJSEgPAMjMzNXGrl5WVsdjYWAaA/eAHP9DASsLUNDUxNm8e9+YsXsxYa+vg93nuHGMjRvB9Pvnk4Pc3SH76058yACw8PJyVlJQMen9ut5s99thjDACLj49nlZWVGlhJyER6AvXVq9yTU1TEE6K1oKKC77OkhBXv3s0A3JAo3dFr1JHePEObN29mA5EjWnqGqAK1gfj+97+P8vJyJCQkIDc3V5O+LNOmTcPvfvc7AMDPfvYzFBYWDnqfhIn50Y+AffuA4cOBDRsGXKekE+PGAX/+M1/e+9vfAv/3f4Pf5wApLS3FT37yEwDAq6++innz5g16nxaLBa+99hqmTp2Kc+fO4cknnwSjiifEQGlp4UvoAWDsWF5EUQvGjeNL8V0uFG3aBJvNhpKSkk5v6ctS+kCFxJBB+Oijj/Cb3/wGFosFb775pqZfugceeACPPvoo3G43/vEf/xHNoiIpQfSH/fuBF1/k27/7HRAfr92+ly8HXniBb3/3u7yAo59xuVz45je/ifb2djzwwAN4QoQCNWDIkCHYuHEjQkJC8P777+O9997TbN+EiWCMh7NcLi5cRo/Wbt8WCzBpEhwXLiDnF79A8aZNAIB169Z53zLQcJfD4ZAupAwnhpxOJ3Jzc7FixYo+vb+goACZmZnIzc1FQUEBcnJykJ+fr7OV2tLc3Ixnn30WAPDcc8/h1ltv1fwYL7/8MkaPHo2ysjL853/+p+b7JxTH5QKysvhzZiagRzHBf/s3YPJkXr36hz/Ufv+98PLLL6OoqAixsbF45ZVXNN//nDlzkJ2dDQB49tln/ZNbQqhFdTVQXw9YrcCkSdq3tgkPx4pnn8Xa73wHicHByPvTn7BmzRoUFBQAAJKSkuBwOG74WGJiIhwOB5xOJwoKCm54j9PpRGpqqra29pd+B+kkUlxczNavX8/Wrl3bZcZ6V+Tl5TGbzeatAzKQ1Veyc4b+8z//kwFg48aNY/X19bod591332UAWFhYGDt9+rRuxyEU5JVXeE5PTAxjFy7od5yCAn4ci4WxPXv0O851nDt3jkVGRjIALDc3V7fjXLt2jU2aNIkBYM8//7xuxyH0RUrOUHs7Y/v387wena7BjIwMlp6eztiRI97ijeIeW1NTw4qLi7vN/bHb7cxms7H09HRWXl7e6bX09PQb8o/6gumX1ufl5fVLDF1f06C/yBRDZ8+e9Q7Cb7/9tq7Hcrvd3uX2jzzyiK7HIhSipoaxoUO5SPnVr/Q/3te+xo+1dGn/ahcNgieffJIBYIsXL2auPhSfGwwfffQRA3g167KyMl2PReiDFDF07hwXKAcO9KlA4qCor/cVZLxueb7NZmOb+1koFV0UauwLlEBtIl544QVcu3YNN998Mx555BFdj2WxWPDiiy/CYrHgL3/5C3bt2qXr8QhFWLcOqKkBZswAPGUfdGXNGr5MeMcO4KOPdD/c4cOH8cYbbwAAfvGLX8Bq1XfY/NKXvoT77rsPLpcL//Zv/6brsQhFaGkBPL0pER/Pw2R6Eh3Nm7kCwNmzPFfJw+rVq7F27do+7yo/Px/p6emw2+1aW9kvTCGGNm7ciPz8fOTm5iInJ6fX97e0tKCurq7TQwaHDx/Gn//8ZwDA//7v/2qyeqw35s6di2984xsAeKd7RqtaiJ64cAH43//l2//93/7pND9uHPC97/HtF17geUo68sILL8DtduOBBx7AokWLdD2W4L/+679gsViQn5+P4uJivxyTMDAXLnBBMmQI4K9E5HHjuOhqbOQNXj1kZ2fD4XDcsNKsO3JycvolnvRCeTFkt9uRnp6OjIwMZGVlISkpCZmZmT1+Zs2aNYiNjfU+EhIS/GRtZ37yk5+AMYYHHnjAr6r5pz/9KcLDw7Fr1y5s2bLFb8clDMhPf8o7zS9eDNx3n/+Om5PDB/3Dh4E//lG3w2zfvh0ffvghgoKCsGbNGt2Ocz2zZs3CY489BoAXVyWIbmluBqqq+HZ8vPZJ090RGgqMHMm3hRjzUFxcjKeeegpOp7PHXWRmZmLt2rXSvUIAjJVALehPztD1iEZyPeURNTc3s9raWu+joqLC7zlDBw4c8JZMP3DggN+OK/je977HALClS5dSzySia06f9nWj377d/8dfu5YfOzGRsbY2XQ6xYsUKBoCtWrVKl/33RHl5OQsODmYA2Geffeb34xMDx685Qw4Hz92R0a6mtZWx4mJ+/EHm5g4EyhnqB9cvo+9YNrw7wsLCEBMT0+nhb/793/8dAPDQQw9h9uzZfj9+dnY2wsLCsGPHDmzbts3vxycMwIsvAu3tvAbQzTf7//jf+Q4v7uhwADqUyygqKsLmzZsRFBTUp/C61iQmJuKb3/wmAPjVK0UYiI5eobFj/X/8kJBuvUNGQ2kx5HQ6kZmZ2Un4CLfdQPqn+IujR4/i3XffhcViwY9+9CMpNowdO9Y7EIuKuwThpbISeP11vi2KIfqbyEjAU38LP/uZ5gOxECCPPvooJk2apOm++8q//Mu/wGq1YtOmTSgtLZViAxHAiKTp2FheZFEGo0bx3KFr1zrlDhkNQ4qh7ipcOhyOTtUwbTYbsrOzOwmf3NxcZGRkSK922RP/60lIvffeezFz5kxpduTk5CAkJASfffYZ9u7dK80OIgB55RWeK5SSwj1Dsvj2t/lNoLQU8FTE1YKysjL8n6ftxwuyxB74pE3kOP785z+XZgcRgLS1+bxCY8bIs6Ojd+jyZXl2DBJDiSEhdtavX4+SkpIbqkkXFBRg/fr1nT6zevVqrFu3zvuoqqpCXl6ev03vM1evXsUf/vAHAHw1l0wSEhLw1a9+FQDw0ksvSbWFCCAaGoBf/pJvv/CC/xI2uyIuzrecX8NQ0i9+8QswxvCVr3wFM2bM0Gy/A0FUpf7LX/6C06dPS7WFCCCuXuXe0Kgovt
RdJkIM1ddzD5ER0TifSUn8WXTxP/7jPxgAlpKSEhCJy6KiaHBwMDt37pxsc4hA4Fe/4onLU6bwqreyOXeOsZAQbpMGXeQrKytZWFgYA8B27typgYGDJz09nQFg3/ve92SbQvQB3ROoXS7G9u3jictVVfoco7+cPOmtSu0vKIFaUZqbm/FLz4z7+eef90tdod6w2+24+eab0d7ejtdee022OYRsGAN+9Su+/d3v+qeuUG+MGwdkZPBtYdsg+M1vfoOWlhakpKRg4cKFg96fFvzzP/8zAOCNN95AY2OjZGsI6VRX88ULoaHA0KGyreGIprDV1UBrq1xbBgCJoQAiLy8PV65cQUJCAjLE4B4AfM9T4O7Xv/41mpqaJFtDSGXbNuDIEe6a//rXZVvj49vf5s9vv82rYQ8Ql8uFV199FQDwne98JyAmJACwYsUKJCUloa6uDm+//bZscwiZMObLzRk5Um6YuiMiXMcYD+EZDBJDAURubi4AICsrCyEhIZKt8bFy5UpMmDABVVVV2LBhg2xzCJkIz8vXvsZXsAQKixcDc+bwpG5P64yB8OGHH+Ls2bMYNmwYHn74YQ0NHBxWqxXf+ta3AACvvvoqVYY3M42N/HtutfLSEoGEyB2qrOxydWdJSUmvhRgLCgp0MKx3SAwFCIcPH8aOHTsQFBTkbYcRKAQHB2OVJ0n1dbGcmjAf588DnhVWeOYZubZcj8Xi8w69+irgdg9oNyJM/c1vfhMRERFaWacJjz/+OMLDw7F//37s2bNHtjmELITXZehQIDhYri3XkbtxI4befjtf6XbdMvuCggKsWbOm15XcDofDe7/xJySGAgThFbrvvvswVkbxrF54/PHHERQUhJ07d+LIkSOyzSFk8Jvf8D5gN98MSCgE2iuPPsq9VeXlwABmlw6HAwUFBbBYLF4vTCDR0VtF+Xsmpb3dFwYeMUKuLV2QmpaGOJHD1CFU5nA4kJmZ2afJdFZWFoqKirz3RH9BYigAaGpqwptvvgmAfxECkTFjxuCee+4BwBNMCZPhdvvCT08/LdeW7oiK4uE7APjd7/r98d///vcAeH7OhAkTNDRMO57xeOQ2bNjQa7iBUJDqan4tRkTIK7LYC95819paoKUFALB27VpkZWV18goVFBQgKSmpy32sXbvW781bSQwFAPn5+XA6nZg4cSLuuOMO2eZ0y1NPPQUAePPNN9Hi+ZITJuGzz4AzZ7jn5f77ZVvTPU88wZ/fe69fidQul8srhgItTN2R+fPnY9asWWhpaaH8PSPCGM/5GcijoYFfg01NXAxduzbwfYmHxrlnNpsNq//t34AhQ/gvqqrgdDqRm5vbrxy89PR0ADe209ITEkMBgBiEn3zySVitgfsvufPOOzFu3DhUVVXhvffek20O4U+EV+iRR/hAHKjMmwckJ/MZ6V/+0uePffrpp6ioqMDQoUOxcuVKHQ0cHBaLBY8//jgAeIuzEgbi2jW+4mogjyFDgLQ0YNkyIClp4Pvp+OhngcScnJwbvDmrVq1CSkoKAF4x3Waz+UJ4lZUoKiyEzWbr1Jk+MzMTK1asgMPhgMVigcViucHTabfbsXnz5n6f4oESuHdek1BRUYGtW7cCAL4mXPwBSnBwsHfW/MYgVuwQBqOuDnjnHb4tPC+BisXis7Ef39HfecJqjz32GMLDw/WwTDMee+wxBAUFYdeuXTh27JhscwgTsXbtWiQmJnoTnAsKCrBx40Zs2bKl8xttNl6DrLUVmz/66IZeoHl5ecjLy0NiYiIYY2CM3ZBYvWLFCr+uLCMxJJk//elPYIzhlltuCdg8hY4IwbZ582ZcNnAfGqIfbNzIXfPTp/OZaaDz2GN8lU1hIXD4cK9vr6mp8fYhC+QQmWD06NG4++67AZB3yHBERvJwV38fdXXAzp3A9u18VedA9tHVIzKy339CXl4eNm7ciHXr1iEzMxN5eXk3rhCzWr3FIB3Hjw+oMXpcXFy3fUj1gMSQRBhj3sTprwdSAbsemDJlChYsWAC3242/9CMMQRgYTxgXjz8eOAXeemLECODee/m2sL0H8vPz0dLSguTkZMybN09f2zRChMrefPNNuFwuucYQfcdi4YnP/X243bwh6pAhvCnrQPbR1WMA17PNZsOWLVuQk5ODrKwsb37PDQwbBgBwVlf7Vpj18zj+XCRAYkgixcXFKCsrQ3h4eEBVnO6Nf/iHfwAA/PGPf5RsCaE7Z84AX3zBB03P/90QiJDzX/7Sa80hUdH5scce09sqzbjnnnsQFxeH8+fPe8PshMKI7vRxcQExISkqKoLNZkNJSUn3b4qOBkJDYYuO7rU9RyAUESUxJJG33noLAHD//fcjJiZGsjV95+GHH0ZQUBCKioooZ0F1hPfv1luBAKx/1S133w3ExADnznEx1w3nz5/Htm3bAACPPPKIv6wbNGFhYd4JFK0qUxyXy7cy0uNtkYnD4UBOTg6Ki4sBAOvWrev6jRYLMGwY4mJiUN1LSoXD4cDRo0fR0NDQ6Xe9FWjUEhJDknC73cjLywMQ+InT1zNixAjceeedAHjOE6Ewf/4zfzaQUAAAhIcDDzzAt3vo5bVhwwYwxrB06VKMHz/eT8ZpgxBv77zzDloN2BiT6CM1NXwJfHj4gHJ8tGbFihXeROq8vDysWbOm+0TnuDgkxcfDcfo0r0rdgcTERDgcDlRVVeHvf/87jh492mk1tdPpRGpqqo5/SWdIDEnCarVi3759+OUvf4kVK1bINqffiFCZSAAnFKSsDCgt5cnIDz4o25r+89Wv8ue8vBsGYoEIkT366KP+skozli1bhjFjxqCmpsavS5AJPyOSiAMgRJaZmYnExERvcWCbzYbXX38dmZmZXef3REQgfdkylBw9CnSxdN5utyMpKQm///3vERYW1qkFzubNmzstx9cbEkMSGTVqFL797W8jOMD6y/SF++67D5GRkXA4HNi3b59scwg9ECGyO+8MCPd8v7n9dt44sqqqy/Ycx44dQ3FxMYKCggyVsycICgrCQw89BAC0mEFV2tuB+nq+HRcn1xbwlWTXC++MjAzU1NR0G9KyL1kC25AhKPjwwxteKy4uRlFREX71q18hOTkZlg5ir6CgwK/NkkkMEQMiKioKX/rSlwDAG+4jFIIxX4hMeFiMRnAwkJnJt8Xf0gGRa7NixQqMCMA+T31BhMree+89NDU1SbaG0Bynk1+LERE8TGZE4uKw+vHHsfbXv77BQ+tyuVDraeg6tMOKs/z8fKSnp5NniDAGYjadn59PoTLVKC0FTpzgA/B998m2ZuAIIff++94+SYJ3PIUkhXfFiCxYsAATJkxAQ0MDPvroI9nmEFrTMURmVMLCkP3003CcP4+S7ds7veR0OsEYQ3h4eKcQWU5ODvUmI4zDl7/8ZYSHh+PkyZMoLS2VbQ6hJe++y5/vvtvXZ8iILFrE67LU1QGffur99cmTJ3HgwAEEBQXhPgOLPYvF4hVzQtwRitAxRDaAOj0BxdChKH7rLTz17LOdcotqPKvkhg4d6g2RZWZmYu3atX71CgEkhohBEB0dTaEyV
RFiSKzIMipWq6+xrPib4BMOt912G4YZMR+qAw96kts//PBDNDc3S7aG0AwVQmSCuDjYhgxB8R/+AFtUFIDOIbK4Dp6vvLw8KTl8JIaIQSG+tHl5eRQqU4Vjx3gbi+Bg4J57ZFszeISge+89XrMFPjH0oBFXyV3H/PnzMW7cODQ0NNzYI4owLqK2kJFDZIKwMF9ZAI9nqLa21hsiC4R+gCSGiEFxzz33ICwsDCdOnMChQ4dkm0NogfCgLF/OGy4anWXL+A2lshL4/HOcOXMGhYWFsFgsuF94jQyM1WrFV77yFQDw9lgjAoMBTxBdLh7aBdS4BgFfqM8jhkSIzGazdVpF1h+0nICTGCIGxZAhQ7x1kt5//33J1hCaIMSQAl4TALyn08qVfPvdd/Gu5++7+eabMWrUKImGaccDHu/X+++/j/b2dsnWEKJcyoD/F7W1vkKLHRKLDY0QdXV1cLe1eUNkg6kyLc6vFuVpSAwRg2al50ZDYkgBzp4Fiop4cTchIFRAhMrefRfvecSQCiEywbJlyxAXF4fKykrs2LFDtjmmJygoCEFBQagT3p3+IpKMVfEKAVzYhYcDjKH58mW43W6EhIQgypNDNBDq6uq853qwGK/aHxFw3HvvvbBYLCgqKsL58+cxbtw42SYRA+W99/jz0qW8YKEqpKfzxpHnz6PpwgUAPhGvAsHBwVi5ciXeeOMNvPvuu7j11ltlm2RqLBYLRo4ciYsXLyIsLAxRUVF9DwW53T4xFBEBqJQUHx0NNDejyVMyIDo6Gi3XlbzoC4wxNDY2oq6uDmPGjBlwmK0jJIaIQTNq1CgsXLgQu3btwgcffIBvfetbsk0iBspf/8qfFRIKAPiM9M47gXfewZcZQ0tyMiZMmCDbKk25//778cYbb+CDDz7ASy+9pMkNghg4sbGxaGpqQmVlJa5evdr3DzY1AVeu8JWQkZHSW3BoSksLUFkJN4BK8Hy3gfbVs1gssNlsiI2N1cQ0EkOEJqxcuRK7du3C+++/T2LIqNTVAZ4O7rj3Xrm26MG99wLvvIP7ALQp+PctX74c4eHhOHPmDA4dOoTZs2fLNsnUWCwWjBkzBiNHjkRbN73xuuQ//oM3F37oIb6tEi4XWh9/HKFOJ/4nMhL/u3s3QkNDB7SrkJAQTcJjAhJDhCasXLkSL7zwAj799FPU1dUhJiZGtklEf9m0iZfLnzqVPxSjZflyhACYB8CyYIFsczQnMjISy5cvx0cffYQPP/yQxFCA0K+cFsaAt94Czp0DbrnF+PWFuuAAgLQzZ/DwlCkBdZ+gBGpCE6ZNm4apU6eira0Nn3zyiWxziIEgQmQKek0AYNuRI9jl2U4+e1aqLXpxr+d/91fxvySMxf79XAhFRvLSFgrypicfaklNDRd/AQKJIUIzRFsD6pFkQFwu4G9/49uKiqG//vWvEBLBquh39Mtf/jIAYPfu3bhy5Ypka4h+I67B9HQlvUJnz57F786cQTOA6MpK4MgR2SZ5ITFEaIYYiD/++GO43W7J1hD9YtcuoKqKF0ZbskS2NZrDGOskhvDpp0Bjo0yTdCE+Ph7z5s0DYwwff/yxbHOI/iJEumcsVY2PPvoITQD2i5IBQvwFACSGCM1YsmQJhgwZgqtXr6K4uFi2OUR/EGGVu+/mbTgU48iRIzhz5gzKQ0PhnjSJr2rZvFm2Wbpwj6eFCoXKDEZlJbB7N9+++265tuiEiBrU3Xyz+IVEazpDYojQjJCQENxxxx0AgL8FkOIn+oDwIqjQi6wLhJfk1ttug1X8jYp6ToQY2rRp04CXLRMS2LSJ59AkJwMJCbKt0Zxr1655e+clPP00/+WOHb6aSpIhMURoiuhiT2LIQJw7Bxw8yOuaeMSsaggxdPfdd/tm3R9/HFAJnFqRmpqKESNGoKGhATt37pRtDtFXFA+Rbd26Fc3NzYiPj8e0u+8Gpk/nuYp//7ts0wCQGCI05q677gIAFBYWUgKnUdi0iT+npQHDhsm1RQfq6+vx+eefA/CIoVtv5cmpFRVAWZlc43TAarXizjvvBABa2WkU2tsB8b/yTChVQ0yQ77nnHl4QVIi+AAmVkRgiNGXs2LHeBE4aiA2CCBd5hKxqfPrpp2hra0NiYiKmTJnCWxzccgt/UdFQ2d0e7xclURuEPXuAmhq+gGHhQtnW6MImz6RLfDe9ou/jj3kLEsmQGCI0h0JlBqKtzZdIrGjSZscQmbdFRcdQmYKsWLECFosFBw4cwAVPLzYigBETxzvuUHIBg8PhQHl5OYKDg31985Yu5b3Krl7l9ZUkQ2KI0BwRKtuyZQstsQ90du/mbTiGDwdSU2Vbozkdl5jf3VHsie3PPwcaGiRYpi8jRoxAquf/KWbkRAAj8mY84U3V2OyZcC1cuNBXdTokBLj9dr4dAN9REkOE5ixYsADR0dGorKxEaWmpbHOInhCekTvu4AnUilFWVoazZ88iLCwMt912m++FKVOASZOA1lZg61Z5BuqImJRQuDrAqaoCCgv5tqILGIQYWrFiRecXhPgjMdR/nE4ncnNzbzypPbBu3Trk5uYiNzcX69at09E6AuBL7IUrdLOitVyUQdwoFQ2R/d0z4162bBkiIyN9L1gsvr9ZUbEgxNDmzZvR3t4u2RqiWwoK+KrGWbOAceNkW6M5LpfLu6T+juvFnvh5506gvt7PlnXGUGKopKQEGzduhNPpRHV1dZ8+I8RPVlYWsrKyYLfbsWrVKj3NJOCbARQUFEi2hOiWq1eBffv4dj8mF0ZCiPEbBmHANytV9DualpaGoUOHoqamBoXC80AEHiJEpqhXqKioCE6nE7Gxsd7QrZfJk4HERJ67+NlnUuwTGEoM2e12ZGVlITExsc+fWbNmDbKysrw/p6enIzc3Vw/ziA6kp6cDAD7//HM0NzdLtoboEhEemj0bGDVKri060NLSgs88A2yXnuRbbuGhwePH+TJ7xQgODsbtnpwMmpQEKIz5QkSK5wstX74cwV0lhwdIqMxQYqi/OBwOOJ1O2EQflA4ExODAGG9U19Ym2xLNmT59OsaOHYvm5mbs2LFDtjlEV3hc1/AIV9XYtWsXrl27hlGjRmH27Nk3viE2Fpg/n2+Lc6EYyz2dz7co+vcZniNHgPPned0r0aJCMUSoutvUFuERk1x8UXkx1BU2mw3OHkqAt7S0oK6urtNDF5KTgZkzgaIiffYvEYvFQqGyQEf8Xzw3TNUQg3B6ejqs3SWHCyGoqFgQHlohDIkAQwiAZct4/SvFqK+vx65duwD0IIZuv52XEzhxAjh1yo/WdUZpMdQdcXFxPeYcrVmzBrGxsd5Hgl59YqZN48+ffqrP/iUjBmJKog5ATp0CHA4+CC1bJtsaXeh2BUtHhBAUSayKMXnyZCQkJKC1tZU8tIGImJAomi+0bds2tLe3IzExEUlJSV2/KSYGWLQIGDIEOHrUvwZ2wJRiqLfk69WrV6O2ttb7qNArn0DUWFBcDO3btw+VlZWSrSE6ITwhCxbwQUgxqqqqUFxcDKAXMbRoEZ+RX7qkZGsOi8VCobJApa0N2L6dbyvu
ne119ffbb/MSAxJXtSothrpLtHY6nT0mYYeFhSEmJqbTQxeEGPriC0DBJOPRo0dj1qxZYIzhU0UFn2ERN0ZFB+EtW7aAMYaZM2di7Nix3b8xPJxXwgWUXVVGYihAKS7mBT/j4njKhIL0uJqzI/HxvAijRJQXQzabrcvcofRASBqdOhUYOxZoaQE8cVXVoLyhAMTtVj55Wtz4+1SPTPG8IbGirKSkpM8lSQg/ICaIt92mZMHTiooKHD16FFar1fsdDGQM+R/o7oJ2OBw3FFVcvXp1pxtxfn5+p6X2UrFYlA+ViZvR5s2bwRTMyTAkhw7xGkORkTxMpiBbPWUD+jQIC+/YZ5/x7uGKMXbsWEyfPh2MMW+pASIAEGO+AYTCQBBeobS0tC5XdAcahhJDQuysX78eJSUlyMnJQX5+vvf1goICrF+/vtNnsrOz4XQ6kZ+fj/z8fBQWFt7wHqmIFgGKiqFly5YhJCQEp0+fRnl5uWxzCMDnAVm2DAgNlWuLDpw/fx4nTpyA1WrFzX1Zrjx3Lg9V1NUpubIToFBZwNHczNMjAOXFUH+6RUiFEb1SW1vLALDa2lrtd37qFGMAY8HBjNXVab//AOCWW25hANhrr70m2xSCMca+9CX+nfvFL2RbogtvvfUWA8BSUlL6/qEHH+Tn5Kc/1c8wifzf//0fA8Buuukm2aYQjDG2dSv/vo0ezZjbLdsazXG5XGz48OEMANu+fbtUW/p6/zaUZ0hJJk7kDSPb2wFFl752DJURkjHBChYRIuvUmLU3xLlQ1HNy6623wmq14tixYzh//rxsc4iOITKLRa4tOlBaWorKykpER0dj4cKFss3pEySGAgHhJlW0e7ZIVv/000/hcrkkW2Ny9u7lK1iGD1d2BcuAxJBIot65E1CwOKHNZkNKSgoACpUFBIrnC4ncNJEmYQRIDAUCiidRp6ameqt+l5SUyDbH3IjFBLffruQKljNnzuDUqVMICgrqW76QYPJkICEBaG1V1kNLeUMBQkMDsGcP31ZUDG3btg0AcMstt0i2pO+oNxoaETGDLSkBamrk2qIDQUFBWOapckyrWSSjeH0h4RVKTU3FkP4Uk7RYfN4hRctAdBRDjFZ2ymPHDp4WIVIkFMPtdmO7JxR/6623yjWmH5AYCgTGjAGmT+ftADyKWjXEDGGbon+fIWho8NWzUrS+0IBCZALF84aWLFmCsLAwnD9/HsePH5dtjnlRPER28OBB1NTUIDo6Gna7XbY5fYbEUKCgeKhMiKHPP/+c8oZk8fnnvhlpDxXYjQpjTBsxtG8fbw2gGBEREVi8eDEACpVJRXExJLz/S5cuRXBwsFxj+gGJoUBBcTE0d+5cxMTEoK6uDqWlpbLNMSciRKnoIOxwOFBRUYGQkBAsWbKk/zsYPRqYMYN7aD//XHsDAwDKG5JMTQ1PhwB86RGKIcSQkUJkAImhwOGWW3jewuHDwOXLsq3RnI4JrRQqk4Q47wYbpPqKGITT0tIQFRU1sJ14ctu85QcUQ4ihbdu2we12S7bGhGzbxsX2tGm8FZNiGDVfCCAxFDgMG8Yr4QLKLrGnvCGJNDTwxpCA74avGCJENqhBWJwbRb+jdrsdkZGRqKqqQllZmWxzzIfiIbJDhw6huroaUVFRhsoXAkgMBRaKh8qEGNq+fTvNSv3N7t08X2j8eGDCBNnWaM6g84UEQgzt3w/U1g7esAAjNDQUixYtAgDvDJ7wI4qLoY75QkapLyQgMRRIKC6G7HY7oqOjUVNTg4MHD8o2x1yIG5+iXqETJ07gwoULCA0N9SYJD4hx44CkJMDt5gUYFUSUuSAx5GeuXOFpEABPi1AQ4fU3WogMIDEUWCxZwvOGysuBixdlW6M5wcHB3sRWCpX5GXG+FRVDwiu0cOFCREREDG5niucNdRRDVG/Ij4ik/NmzeQV4xXC73YYstiggMRRIxMYCc+bwbUVXs1DekASam30Vbw04SPUFTUJkAnGOFP2OLliwACEhIbhw4QIcDodsc8yDGNP7UxndQBw5cgRVVVWIjIxEamqqbHP6DYmhQEPMShUXQzQr9SOFhUBLCzBqFDBlimxrNIcx5s1V0EQMiWuwsFDJPmURERFIS0sDQKEyv6K4GDJyvhBAYijwEBeKooNUamoqIiIiUFlZiSNHjsg2xxx0zBdSsEP28ePHcfnyZYSFhWHBggWD3+HEiUB8PE8437178PsLQChvyM/U1fGkfEB5MWTEEBlAYijwEBfKwYOA0ynVFD3omOBKoTI/oXjy9OeeGXdaWhrCw8MHv0OLxVR5Q4Qf2LmTJ+VPmsST9BWDMWbo5GmAxFDgIUIZjAFffCHbGl0QMwdq2uoH2tp83yODzth6Y4eny3y/utT3huJ5Q4sXL4bVaoXD4cC5c+dkm6M+IkSm6ITkyJEjqKysNGy+EEBiKDBRfFYqZg7btm2jvCG92bcPaGwEhg4FZs6UbY0uCM/Q0qVLtdupuAZ37+b5VooRExODefPmAfCdP0JHTJIvtHjxYoSGhso1ZoCQGApExAWj6CAlwhlXrlzBsWPHZJujNkJQ33wzYFXvchcroiwWy+DqC13PTTcBI0bwlXhFRdrtN4CgUJmf6LiaU1ExZPQQGUBiKDARF0xREdDUJNcWHQgLC8PChQsBUN6Q7iieLyRCZMnJyYiNjdVux5Q3RGhFYSHQ2gqMHKnsak5dQtV+hsRQIDJpEm/i19bmm1EoBtUb8gMul/K5CroOwornDYmw4pEjR3D16lXJ1ihMxxCZgqs5T506hYsXLyIkJATz58+Xbc6AITEUiJhoVkr5Cjpy6BBfkRgdDXjyQ1RDVzEkrsEvvuDL7BVj+PDhmOnJIxPnkdABk0xIRNkUo0JiKFBRPG9owYIFCAoKwrlz53D27FnZ5qiJENJLlgDBwXJt0YG6ujqUlpYC0Dh5WjBrFmCzAQ0NvhoxikGhMp1xuXyrOQ0cQuqJLzx/ny7XoB8hMRSoiAtn1y4eLlOMqKgo72qWLxQtISAdxfOFdu3aBbfbjcTERIwdO1b7AwQFAWKAV1QskBjSmdJSoL4eiIkBkpNlW6MLwjMk+k4aFRJDgcrMmXw5dGMjXx6tIOLiIRe9DjCmvBjSZUn99SieNyTCi/v370dtba1kaxREePYXL+biWjGqq6u9nQQ0Xc0pARJDgYrV6puVKhoqE2KIPEM6cPIkcOUKEBYGGDipsSf8soJFzHZ37uQCUzHGjRuHpKQkuN1u7Ny5U7Y56qF4fSHxnZk2bRpGjBgh2ZrBQWIokFE8b0iIoYMHD6Kurk6yNYohbmypqVwQKUZLSwv2eFZa6uoZstv5+ausBE6c0O84EhFikiYlGsOYaZKnjR4iA0gMBTYdxZDbLdcWHRg7diwmTZoEt9uN3Yo2xJSGuLEZ3HXdHSUlJWhubsbw4cNx00036Xegjp41RcWCCG+QGNKY48dN4501evI0QGIosLHbgYgIoLoaKCuTbY0uUN6QTogbmwIztq7omC9k0bt2S8dQmYKIa3Dv3r1oU3CxhjSEVygtTUnvbHNzMwoLCwGQGCL
0JjQU8FRqVj1URrNSDamuBjxJjap6hvxa8VacQ0W/o9OmTYPNZsO1a9e8pQoIDRATPEXzhYqLi9Ha2oqRI0ciKSlJtjmDhsRQoCMU965dcu3QCTGj2LNnD81KtUKEHKdM4f21FMPtdssRQ2VlXGgqhtVq9YbKKIlaQ8S5VMBr0hUdQ2S6e2f9AImhQEcMxIoOUjNmzIDNZkNjYyPNSrVC8RBZWVkZampqEBkZiblz5+p/wOHDeeNWQNlJCeUNaczVq76Ee+HdVwxVii0KSAwFOuJCEkulFcNqtWLRokUAaCDWDMXFkMgXWrRoEUJCQvxzUMVDZSJcTZ4hjRCiecYMXi9OMdxut3e8VmElGUBiKPCx2XgBRkDZWSnlDWlIWxuwdy/fVjRfSMqMVAz4in5H09LSqD2OlghRqeg1ePToUVRXVyMiIsLbScDokBgyAoqHysRN7YsvvgBTsLCdX9m/H2hq4rPRadNkW6MLwnvh14q3Qgzt3atke5zIyEjvTY28QxqguBgSE5IFCxb4zzurMySGjIDiYmj+/PkIDg7GhQsXcPr0adnmGJuO9YWs6l3ely9fhsPhgMViwYIFC/x34JtuAuLigOZm5dvjkId2kLS2Ap4l56qKIZXqCwnUGy1VRFxQhYX8QlOMyMhI2O12ADQQDxrFZ6S7PKHimTNnIjY21n8HtliUzxuiJGqN2L+fi+a4OGDqVNnW6AKJIUIOU6YAw4YBLS3Kzko7hsqIAcKY8snTQgxJaQqpePFFcU5LS0vR0NAg2RoD03FCosCS8+u5ePGi1zu7UKGVciSGjEDHWamiAzFVotaAM2eACxeA4GBly/+LfBaxAtGvdEyiVjC3LT4+HuPHj4fb7fb2fSMGgOLeWXENzp4927/eWZ0xpBhat24dcnNzkZubi3Xr1vX6/oKCAmRmZiI3NxcFBQXIyclBfn6+HyzVEJOIocOHD8PpdMo1xqgIr9C8eUBkpFxbdKC1tRVFRUUAJHmGUlOBkBDg4kVA0dw2WmI/SDp6ZxUVQ1K9szpiODEkxE9WVhaysrJgt9uxatWqHj/jdDpRUFCAVatWYdWqVUhKSkJGRoY/zNWOjmJIwVnpqFGjkJSUBMaY92Ij+om4gSkaItu/fz+am5sxbNgwTJkyxf8GRETwfoGA8pMSClcPkIoK7p0NClLWOyvGZyneWR0xnBhas2YNsrKyvD+np6cjNze318+dOnUKjDGUl5d3+rxhSE3l4Y8LFwBF64CImQZ1sB8gJskXWrRokbzy/4rXGxLX4K5du+B2uyVbY0CESFbYO1tcXAyAxJBUHA4HnE4nbDbbDa8VFBT43yB/EhnJLzBA2VmpSMYjMTQA6uqAgwf5tmLua4HUfCGB4ivKZs+ejejoaNTV1eHw4cOyzTEeiucL7du3Dy0tLRg+fDgmT54s2xxNMZwY6gqbzdZrnsnGjRuRn5+P3Nxc5OTk9PjelpYW1NXVdXoEBIrnDYmb3J49e2hW2l/27AHcbmDiRGDsWNnW6IKUYovXIzxDBw9yAaoYwcHB3vpNlDc0ABQPVQvv7MKFC5VoztoRQ4mh7oiLi0N1D92k7XY70tPTkZGRgaysLCQlJSEzM7Pb969ZswaxsbHeR0JCgh5m9x/FxdDs2bMRERGB2tpaHD16VLY5xkLxGWlFRQXOnTuHoKAgzJeZizF6NJCYyPP2FPVgUt7QAGls5DWGAGWvQ1XzhQBFxFBPQggAEhMTkZiY6P35oYceQn5+frfepNWrV6O2ttb7qKio0NLcgSMusNJSQME6IMHBwd4bHYXK+ok4XwoOUoBvEJ4zZw6ioqLkGiPOsaLLz6n44gApLARcLiAhAYiPl22NLpAYChA6CpqOOJ3Obl8DcMMyepFz1F3YLSwsDDExMZ0eAUF8PDB+PL/gRLl3xRAXGa0o6weM+W7MChVB60hADcLiHCsq2EUIxOFw4OrVq7LNMQ6Ke2fPnz+PiooKWK1Wud5ZnTCcGLLZbF2KmPT09C4/43Q6kZmZ2ekzwiPUk4AKWBQPlVES9QA4cQKoqQHCwoDkZNnW6EJA5AsJOoohBctcxMbGYvr06QBAxRf7g+JiSExIkpOTER0dLdka7TGUGAJ4CKvjyrH8/PxOS+UdDkenQow2mw3Z2dmdhE9ubi4yMjK6XJUW8JhEDB0+fDhwEtcDHXHDstuB0FC5tuhAU1MT9nna0ASEZyg5GQgPB6qrgZMnZVujCyKJmiYlfcTtBoQ3W3ExFBDXoA4YTgxlZ2fD6XQiPz8f+fn5KCwsxPr1672vFxQUdPoZ4AJq3bp13kdVVRXy8vL8bbo2iAtt1y4lZ6WjR4/GxIkTwRjD3r17ZZtjDBQPkRUXF6Otrc373ZBOaCiQksK3FRUL5KHtJ8ePc3EcEQHMmSPbGl1QXQwFyzZgIGRnZ3u3r68kLSpTd0R4h5RAzEpranh4RMGuyAsXLsTp06exe/fubsOfRAfEDcszm1eNgCi2eD0LF/JaQ7t3A1/7mmxrNEeIob1798LlciEoKEiyRQGOmJCIli2K0dLSomyxRYHhPEOmJySEZqWEj6YmvroQUFYMBVS+kEDxJOqZM2ciKioK9fX1KCsrk21O4KP4hGTfvn1obW3F8OHDkZSUJNscXSAxZEQUH4jFzGP37t1gCoYCNWXfPqC9HRg5EpgwQbY1msMYC4zK09cjrsHSUuDaNbm26EBQUBDS0tIA0KSkTwjPkKJiKCC9sxpDYsiIiIFY0ZUec+fORVhYGKqqqnBS0QRVzRA3qoULAQUHqVOnTuHKlSsICQlBivCIBgLx8cC4cbzMhSd8oBrkoe0j164BBw7wbROIIVUhMWRExAWn6Kw0NDQUdk93cBqIe0HxGalY2j137lyEh4dLtuY6xKRE0ZpYJIb6SEkJF8Vjx1KxRQNDYsiIxMfzC0/hWSkVX+wjJhFDCwNxpZzi4WqxvP7IkSOora2VbE0A0/EaVNA7e+7cucBohaMzJIaMiMWi/EBMs9I+cOkScOYM/z4oOkgJMbQgEMVeR8+Qgrlto0aNwqRJk8AYQ6GiFe81QfHk6Y7FFqW3wtEREkNGRVx4iuYNCTF04MABNDY2SrYmQBH/+xkzgEBpGaMhra2t3mKLASmG7HYgOJiL0kDpX6gxNCnpA4p7Z80QIgNIDBkXxT1DCQkJGDduHFwul7e+BXEdig/CpaWlaGlpwbBhwwJzOW9kpK/AnqLXIYmhXrh4kQthq5XXGFIQEkNEYJOSAgQFAefPA+fOybZGF8RATHlD3aB45WlxA05LSwvc5byKT0o6iiEqc9EF4hqcNQtQsF9XS0uL1zsbkHl7GkJiyKhERQGzZ/NtxUNlNCvtApcLEO1KFPUMBXS+kEBxMTRnzhyEhoaiqqqqywbZpkfxfKGA985qCIkhI6P4QEzFF3ugrAxoaOCieOZM2dbogqHEUEkJ0NIi1xYdCAsLozIXPaF4qFpcgwHtndUIEkNGRnExZLfbERwcjEuXLuHMmTOyzQksxCA8fz4PlypGx4KbohJyQJKUBAwbxoWQaIuiGOSh7QaXCy
gq4tuKiyHVQ2QAiSFjIy7A4mKgrU2uLToQERGBefPmAaCB+AYUd8/v9YQAp06diri4OMnW9ACVuTAvR45w72x0NDB9umxrdMEQ3lmNIDFkZKZOBWw23qzz4EHZ1ugCDcTdYBL3vCEGYZOIof3796OpqUmyNQGEuAbT0sg7qwAkhoyM1eq7GSo+ENOKsg7U1wOHD/NtI4iFAUBiKHAYP348Ro8ejfb2dpSUlMg2J3AwkXd26NChkq3RHxJDRkfx4osiiXrfvn1obm6WbE2AUFQEuN1AQgJvy6IYjDHvQGwIMTR/Pg+XnToFXL4s2xrNsVgs5KHtCsW9s+J/bYhrUANIDBkdxWelEydOxMiRI9HW1uatd2F6FB+ET548ierqaoSFhSE5OVm2Ob0TG8urgAPKTkpIDF0HeWeVg8SQ0RGx3OPHgaoqubboQMdZKYXKPJik2KLdbkdoaKhka/qI4pMSEkPXUVTE+9GNHw+MHi3bGs3p6J01w0oygMSQ8Rk2DJgyhW+LInyK0bHekOlhTPlcBUPOSEWrAkW/o6mpqbBard4O5qZH8QnJiRMnUFNTg/DwcGN4ZzWAxJAKiAuSXPTqU1HBG4MGBfFGoQpiSDEkrsG9e3n9GcWIiory3hT3KDrO9AuTTEjsdjtCQkIkW+MfSAypgOIrysSstKKiAufPn5dtjlzEjWjOHN4oVDGam5tR6ileaCj3/PTpQEwM0NjoyyVRDCFOTT8pYUz5vD2zJU8DJIbUoKNnyO2Wa4sOREdHY7anD5vpZ6WKz0j37duHtrY2jBw5EhMmTJBtTt+xWn35e4qKBfLQehDe2eBg8s4qBIkhFUhOBsLDAacTOHFCtjW6IC5K04shxWekHQdhw/VCMkkSdVFREdoUrHjfZzp6ZyMi5NqiA01NTcb0zg4SEkMqEBICpKTwbUUHYnLRg7dcKS7m24oOUoaekSouhqZOnQqbzYbm5mYcOHBAtjnyMIF3tr29HaNGjcL48eNlm+M3SAypguIDccdZaXt7u2RrJHHgANDczFuwiBWEimFoMSRsLisDamrk2qIDVquVJiUAeWcVhcSQKii+omzatGmIiYnBtWvXcFjRBNVe6dgLyarepXvlyhWcOnUKFosF8+fPl21O/xk+nHexB4DCQrm26ITp84Y6emdNIIbMhHojqlkRX9wDB/iKFsWwWq3eG6Rp84YUr20i/q/Tpk1DbGysZGsGiOLtcUyfu3fwoPLeWTOuJANIDKlDfDzvU+Vy+WYuimF6F73iuQpKzEgVF0Oie/mJEydQpWDF+17pGCJT0Dt7+fJlnDlzxrje2UGg3n/TrFgsyofKhIvelLPSmhrecgXwLeFWDPF/NfQKlo5iiDG5tujAsGHDMMXjEdmraMX7HjHJhGTGjBmIiYmRbI1/ITGkEuICVbSHl/AYlJWVoba2VrI1fkbceCZP5rkpiuF2u43Vqb475s4FQkOBykrexV5BTB0qM1HytNkgMaQSirvoR44ciYkTJ4IxhkJFE1S7RfFB+NixY6irq0NkZCRmzZol25yBExbGBRGg7MpO04qhmhrg2DG+rbh3lsQQYWxSUngc+8IFQNFmiqYNlZnEPZ+SkoLg4GDJ1gwSxScl4ka5d+9eMAVDgd0iJmBJSUp6Z10ulxre2QFCYkgloqMBMatWfCA2lRhizBcmM3I+TQ8oNSNVPHdvzpw5CAsLQ3V1NU6ePCnbHP+h+GrOo0ePor6+HlFRUZg5c6Zsc/wOiSHVMMmsdM+ePeaZlZaXA1VVPAQzZ45sa3RBqeW84m/Ytw9oaZFriw6EhoZi3rx5AEy2stMk3tnU1FTje2cHAIkh1VBcDM2bNw8hISG4cuUKTp8+Ldsc/yAG4XnzeHKuYly7dg0HDx4EoIgYSkzkYZTWVsDT40k1TBeuNkGneqW8swOAxJBqCBduURGgYNuK8PBwzPUkqJpmIFbcPV9cXAyXy4UxY8YgPj5etjmDx2LxJdgq+h01Xbja4eDe2dBQZb2zJIYItZg2DRgyBLh2DVC0bYXpBmKTzEgXLlyoTi8kxT204hosLS1Fc3OzZGv8gPg/zpvHw9WK0djYqJZ3dgCQGFKNoCBAVA5VfCA2Rb5CczOwfz/fVnSQUnJGKv4WRb+jEydOxIgRI9DW1oZ9+/bJNkd/xP9RUe9sUVER3G43xo0bh3Hjxsk2RwokhlRE8VmpyFfYt28fWltbJVujM/v28eaQI0cCEyfKtkYXlBRDIkxWXs4LMCqGxWIxl4fWRN5Zs0JiSEUUF0NJSUkYNmwYWlpaUKpogqqXjoOwKiGkDly8eBEVFRWwWq1ITU2VbY52DB0KTJ3KtxVtW2GaDvYtLeSdNQGGXD+3bt062Gw2AIDT6UR2drYunzEs4gt95AhQVwco1mPGYrEgLS0NH3/8Mfbs2aN2Q0GTLOedOXMmoqOjJVujMQsX8n5ye/YAX/qSbGs0xzSeof37+crA4cOBSZNkW6MLJIZ08gzV1dXpsVsAXNQAQFZWFrKysmC327Fq1SrNP2NoRo8Gxo/ny0GLimRbowummZUqvpJM6UFYcQ/t/PnzYbFYcPr0aVy5ckW2OfqhuHf2/PnzOH/+PIKCgpCSkiLbHGkMWAzNnz8f9fX1N/x+y5YtmKSjel6zZg2ysrK8P6enpyM3N1fzzxgexQdiU8xKL18GTp/mA7Ci3i+lii1ej/ib9u5VsoN9bGwspk2bBkDx61Dx5Gnxv5s1axaioqIkWyOPAYshu92OiRMnYuvWrd7f/fznP0dmZqZuQsPhcMDpdHrDXR0pKCjQ7DMtLS2oq6vr9DAciouhNE+C6smTJ1FVVSXZGp0Q/7vp05ULdQK8F1KRx3OppBhKTgbCw3mDzxMnZFujC6aYlJgkeVrJa7AfDFgMrV+/Hhs2bMCDDz6I1atX484770RBQQFOnTqFBx98UEsbvTgcji5/b7PZ4HQ6NfvMmjVrEBsb630kJCQMxFy5dBRDCs5Khw4diqmeBFVlB2LFQ2RHjhxBQ0MDoqOjMWPGDNnmaE9ICGC3821Fv6PKi6GrV3nBRUBZ7yyJIc6gcobS09OxevVqrF27FkVFRVi/fj1iY2O1sq3PxMXFobq6WrPPrF69GrW1td5HRUWFFmb6F7ud1xy6dAkwov19QPmWACaZkc6fPx9BQUGSrdEJxesNiWtw7969cLvdkq3RAbEScNo0oIvogtFR3jvbDwYlhp5++mn87Gc/Q3FxMdasWYOUlBT89re/1cq2PtNfIdTbZ8LCwhATE9PpYTgiI7mbHqBZqRFxuXwDsaKDlClmpIqHq2fNmoXIyEjU1dXh6NGjss3RHsVXcx4+fBiNjY0YMmSIN//LrAxYDE2ePBmnTp2Cw+HAvHnzkJWVhcLCQqxZswZ33XWXljZ6SUxM7PL3Tqez29cG8hllUHwg7iiGlJuVHj0K1NdzUTtzpmxrdMEUYkiEOEtLgaYmubboQHBwsHcFkpKTEsVD1WIBg9Le2T4yYDGUkZGBTZs2dQqLJSYm4uTJk7p5UhITE2Gz2brMA0pPT9fsM8qguBhKT
k5GeHg4nE4nTqiWoCr+Z/PnA8GGLAfWIw0NDTjs6Z2ntBgaPx4YNYo3TVa0bYWyHlq3m7yzJmLAYuhnP/tZt69t3LhxoLvtldWrV3daBZafn99p2bzD4fDWFerrZ5RFfMGLi3lLB8UICQlRd1aquHte9EJKSEjAmDFjZJujHxaL8pMSZcXQ8eNAbS0QEQHMni3bGl0gMeTDcO04srOz4XQ6kZ+fj/z8fBQWFmL9+vXe1wsKCjr93JfPKMtNNwGxsdw9f+iQbGt0QdmBWPHkaaXrC12P4mJIJFEfPHgQjY2Nkq3REPH/SklR0jtbV1eHI0eOADDJddgLhvwPd2ylkZGR0ek1UWW6P59RFquVh1kKCviFPW+ebIs0R8lK1A0NPvGqaK6CqWakiouh+Ph4jB07FhcuXEBxcTGWLVsm2yRtULzYYlFRERhjmDBhAkaPHi3bHOkYzjNE9BPFB2JxMz1w4ACaVElQLSri+Qrx8cDYsbKt0RzGmLm6ZM+fz8Nlp0/zquIKoqSHVnHvrKkmJH2AxJDqKC6GEhISMHr0aLS3t6OkpES2Odqg+CB87tw5XLx4EUFBQbCLooQqExPDq4gDyl6Hyomha9eAAwf4tqLXIYmhzpAYUh3xRT96lCcDKobFYvFezMqEyhRfzisG4eTkZERGRkq2xk8oPilRTgyVlPBaX2PGcA+tYnT0zpIY4pAYUp2RI4FJk3hLjsJC2dboglKVqBlTfiWZKQdhIWxV+I52QWpqKqxWK86dO4fz58/LNmfwKN6pvqKiApcuXUJwcLA5vLN9gMSQGaBZqXE4dw64eJG3UvGUDVANU4oh8bcWFvJ8MMWIjo7GrFmzAChyHSqePN3ROxsRESHZmsCAxJAZULw/UmpqKiwWC86ePYuLFy/KNmdwiBtJcjKvPq0Y7e3t5uyFNHMm/3/W1fGQtYIoNSlRPG/PlBOSXiAxZAYU72A/ZMgQdWalig/CBw8eRFNTE2JjY3HTTTfJNsd/BAcDqal82+jf0W5QRgxdvMibW1utvv+ZYpAYuhESQ2Zg3jwgJAS4epUv71UQZQZik+QLzZ8/H1aryYYfxT204hosKiqCy+WSbM0gEGPIzJlAdLRcW3Sgra0NxcXFAEgMdcRko5FJCQ8H5szh20YXC92ghBhqa+OtUwDlcxVMUV/oehTP3Zs+fTqio6PR2Njo7TtnSBSfkBw6dMjrnZ06dapscwIGEkNmQfGBWNxcCwsLjTsrPXSIt06JjQUUHaRM7Z4Xf/PBg4BKbSs8BAUFIS0tDYDBJyWKh6pFCZK0tDTzeWd7gM6EWVBcDIlZaUNDg7ffjuEQM9K0NJ6voBi1tbU46kkeNqUYio8Hxo3jq8mEB1AxDF/zy+XylSBR1Htp6glJD6g34hJdI774JSVAa6tcW3QgKCgI8+fPB2DgWanixRYLCwvBGMOkSZMwYsQI2ebIQfFJieHD1YcPc6/dkCG+quGKQWKoa0gMmYUpU4ChQ4GWFl+ZecUw/ECsuHueBmGYRgwdOXIEdXV1kq0ZAB29s0FBcm3RAafTaW7vbA+QGDILFgu/wAFlB2JDd7CvqfHVnxH/J8UgMQTlxdDo0aMxfvx4MMa89aQMheLFFgs9IUBTe2e7gcSQmVB8IBY32cOHD6O+vl6yNf1E5CkkJQEKDlKMMa9INeVKMkFKCs8HO3cOUKFtRRcY2kOruBiiCUn3kBgyE4qLIUPPShVfznvq1ClcvXoVoaGhmDdvnmxz5BEdDXgKhKp6HRq2V6DTCZSV8W1Fr0NTl7boBRJDZkKEX44f52EZBTFsqEzYu2iRXDt0Qvw/7HY7wsLCJFsjGcUnJR1XlDEjVbwX3tnERGW9s+QZ6h4SQ2Zi+HAehgGAvXvl2qIThnTRd+xUr+iMbdeuXQBoRgpAeTFkt9sRHByMy5cv4+zZs7LN6TuKX4OnT5/G1atXERISgrlz58o2J+AgMWQ2FB+IO4ohw8xKhacuPJw3aFUQyhfqgDgHRUW8ro1iREREINnzPTbUpERxMST+F3PnzkV4eLhkawIPEkNmQ3ExJGally5dQkVFhWxz+oYYhFNSgNBQubboQFNTE/bv3w+AxBAAYNo0XsemsZHXtVEQw3loTeCdpRBZz5AYMhuKd7CPiIjAHE8fNsPkDSmeL1RSUoL29nZvgrvpCQoCPAVCVZ2UGE4MnTwJVFcDYWG+Po6KQWKoZ0gMmY25c7n3oaoKcDhkW6MLhhuIFZ+RdgyRWSwWydYECIp3sBcewOLiYrS1tUm2pg8o7p1tbW1FSUkJABJD3UFiyGyEhQFiabNRxEI/MZQYamz0VQQ3gRgiPCgerp4yZQpsNhuam5txwAgV7xUvbXHgwAG0tLQgLi4OkydPlm1OQEJiyIwoPhAbalZaWMgbd4omngpCYqgLxDV45AhgxLYVvWC1Wo3VwV7xvoDif5CWlkbe2W4gMWRGFBdDU6ZMwdChQ40xK1U8X+jcuXM4d+4crFYrUlNTZZsTOIweDYwfz/P2jFYgtI8YxkN77RpQWsq3FRVDYkJCIbLuITFkRsQFsW8fb9yqGBaLxTizUpPkCyUnJyMqKkqyNQGG+J8H+nd0gBhGDJWUAO3twJgxQEKCbGt0gZKne4fEkBlJTOQFGFtbfTMixTBEJWoTLOcV53+Rop6vQaG4h1bceI8dO4aaQK543/EaVDCEVF1djRMnTgCAd5JI3AiJITNigg72hpiVnj4NXL4MhIQAdrtsa3SB8oV6QPEyF8OHD0eSp+K96JYekCg+Idnr6TYwefJkDBs2TLI1gQuJIbOi+KxUzICOHz8euLNSMQjPm8erTytGa2sriouLAZAY6hK7HQgOBi5dAoxSILSfdOxTFrAovpKMQmR9g8SQWVFcDA0bNgxTpkwB4JsZBRyKz0gPHDiA5uZmDB061Pu/IDoQEeFrvxLIYmEQBLyH9tw54Px5wGoFFE3wp+TpvkFiyKyIMNnJk0BlpVxbdCLgZ6WKiyEqttgHFJ+UBHyvQHHek5MBBRP83W435e31ERJDZmXoUN4jCaBZqQyam/lqPkBZMUSd6vuA4mJo7ty5CA0NRVVVFRyBWPFe8QnJsWPH4HQ6O7UpIrqGxJCZETMFz01LNQJ6VlpSArS1AaNGARMnyrZGFyh5ug8IMVRczL8PihEWFoZ5nor3ATkpUVwMiQlJamoqQkJCJFsT2JAYMjOKi6E5c+YgLCwM1dXVOHnypGxzOqP4ct4rV654PQG0nLcHpk4FbDbuKTx4ULY1uhCwHtq2Nl/BS0XzaYQYohBZ75AYMjPiAtmzhxcdU4zQ0FDYPUvWA24gVnxGKs739OnTYbPZ5BoTyFitpilzEXC5ewcOcBFqs3FRqiAkhvoOiSEzM2MGEBPDy9HTrNS/CG+comKIkjb7geId7MU1uH//frQEUsX7jkvqrerdCmtra3HkyBEAdB32BfW+AUTfsVp9N2NFQ2UiXyWgxNC5c/xhtQLz58u2RhcoX6gfKB6uTkxMxPDhw9Ha2or9+/fLNseHCbyzjDEkJiZi1KhRss0JeEgMmR3F
B+KOs9Lm5mbJ1nhQfDmvy+Xy1nYiMdQHxDk6cQK4elWuLTpgsVi834NdgTTOKN6pnkJk/YPEkNlRXAxNmDABI0eORFtbG/aJpeyyUXxGeuTIETQ0NCA6OhozZsyQbU7gM3QoMH0631b0Oly8eDEAYOfOnZIt8VBVxcUn4MvZUgwSQ/3DcGJo3bp1yM3NRW5uLtatW9fr+wsKCpCZmYnc3FwUFBQgJycH+fn5frDUIIh8hfJy4MoVubboQMdZacCEyhQXQ2IQTktLQ1BQkGRrDIJHLKguhr744ovAKHMhxoKpU4G4OLm26AAVW+w/hhJDQvxkZWUhKysLdrsdq1at6vEzTqcTBQUFWLVqFVatWoWkpCRkZGT4w1xjYLPxRGpA+QTOgFjN0trqW86r6CBF+UIDQIihQPGcaMz8+fMRFBSECxcuoCIQ+rApPiE5evQoamtrERkZiWTR8oXoEUOJoTVr1iArK8v7c3p6OnJzc3v93KlTp8AYQ3l5eafPEx7ETVnRgTigVpSJ5bxDhwKK9usiMTQAhBjau1fJ4ouRkZGYO3cugADJGxJjnaLfUXGO58+fj+DgYMnWGAPDiCGHwwGn09llzZKCggL/G6QSiucNzZ8/H1arFadPn8bFixflGqN4sUWn04mysjIA1BiyX0ydygVyczMQSCuuNCRg8oba231hsiVL5NqiE5Qv1H8MJYa6wmazwel09vjZjRs3Ij8/H7m5ucjJyen1WC0tLairq+v0UBoxKy0sVHJWGhMTg9mzZwMIgIFYcfe8WEWWmJiIkSNHSrbGQFityk9KAkYMHTwINDTwGmszZ8q1RSdIDPUfw4ih7oiLi0N1dXW3r9vtdqSnpyMjIwNZWVlISkpCZmZmj/tcs2YNYmNjvY+EhAStzQ4sbrqJ5w41NfEwjoIs8cwAv/jiC7mGiBudooMUNWcdBIrnDQkxtG/fPjQ2NsozpGOITMEEf6fT6S22SNdh35EWTMzPz8eGDRt6fd/q1au9LRW6oichBPAZakceeughrFq1qtuQmzjmc8895/25rq5ObUEkii9+8gm/WaekyLZIcxYvXoxXX31V7qz0yhXA4eDhMUWX8wqxKW58RD9QPHcvISEB48aNw/nz51FUVIRbbrlFjiHi/CoaIhO5kUlJSeSd7QfSxFBGRka/VnVdL2oETqez29cALro6HkcIIIfD0a3ICgsLQ1hYWJ9tU4JFi3xi6DvfkW2N5gjPUElJCZqamhAREeF/I8QgPGMGEBvr/+PrjMvl8iZPL1H0RqMraWl8YlJRwSuUx8fLtkhTLBYLFi9ejLy8POzcuVOeGBLeYUUFO4XIBoZhwmSJiYmw2Wxd5g6lp6d3+Rmn04nMzMxOnxH5RT0JKFOieL7ChAkTMGbMGLS1taFILG33N2IQVlQoHDp0CPX19RgyZIg3R4voB9HRwJw5fFvR61B4DKWtKDt/HjhzhotORRP8SQwNDMOIIYCHrzquHMvPz++0VN7hcHQqxGiz2ZCdnd1J+OTm5iIjI4M6aV/PggU8fHPqFHDpkmxrNMdiscjPG1JcDInzunDhQiq2OFBMkje0c+dOOcUXhQhLTgaGDPH/8XXG7XZ7w2QkhvqHocRQdnY2nE4n8vPzkZ+fj8LCQqxfv977ekFBQaefAS6g1q1b531UVVUhLy/P36YHPh1XVig+K5WSN9TcDBQX823FxRCFyAaB4nlDc+fORXh4OKqqqnBCtMPwJ4qHyMrKyrzFFsk72z8MV40pOzvbu319zpGoTN0R4R0i+sDixcChQ1wM3X+/bGs0R9ykxazU4s86P0VFvPr0qFGAoiFaEkMaIG7S+/bx1Z0yctt0JDQ0FKmpqdixYwd27tyJqVOn+tcAITIVFUM7duwAwL2zVGyxfxjKM0TojLiJyV5+rhMdZ6XHjh3z78E7hsgULLZ4/vx5nDlzBlarlYotDoaJE4HRo3m9L+FJVAxpHtqmJqCkhG8rKthpQjJwSAwRPsQFVFjIBw7FCA0NRZpnSbvfB2KT5AslJydjiIK5GH7DYlE+VCZNDBUW8urTY8YAEyb499h+QniGli5dKtkS40FiiPCRmOiblcpacaUzHbtn+w3GfDc2RQcpmpFqiOId7EVi75EjR3rtHqApHesLKeidvXDhAk6dOgWr1UrFFgcAiSHCh8Xiu1l7Zhiq0TFvyG8cOwZUVfH8j3nz/HdcP0JiSEM6riiTseJKZ0aOHInJkyeDMebf5smK5wuJa3DOnDmIiYmRbI3xIDFEdEZxMSRmpUePHkVVVZV/Diq8UGlpQEiIf47pRxoaGrDf01yUxJAG2O1AaCivWF5eLtsaXfB7qKyjd1ZRMSRCZHQNDgwSQ0RnhBjauRNwu+XaogPDhg3DtGnTAPix8Jvi+UKFhYVwuVyIj4/H+PHjZZtjfMLDgdRUvq34pMRv4erjx7l3NjxcWe8s5QsNDhJDRGfmzAGiogCnE/A0+1MNv+cNKS6GKESmAzffzJ8//1yuHTohviu7d+9Ge3u7/gcUXqH587nXTTHq6+vJOztISAwRnQkO9q1mUXRW6te8oStX+KwUULZTPYkhHVA8XD1z5kwMHToUjY2N2Ldvn/4HVLzY4p49e+B2uzFhwgTEK9bTzl+QGCJuRNzUFB2IxU177969aG1t1fdgQnDNnAkMHarvsSTgdru94UYSQxoiVjwdPw5cvizbGs2xWq3e78vn/vB+Kd6pnkJkg4fEEHEjis9Kp06dimHDhqG5udnrWtYNxUNkhw8fRm1tLaKiopCcnCzbHHUYOhSYNYtvK3od3uwJBeouhqqrgbIyvq24d5bE0MAhMUTcyIIFQFAQ7+5cUSHbGs2xWCzevKEdet9oFBdDHZuzUvl/jRE3NkXzhoQY2rFjh75NW8U1Pm0aMHy4fseRRHt7O3lnNYDEEHEjQ4YAc+fybUVbc4gZlK6zUhM1Z12saC6GVEQStaKeoZSUFERERKCyshJHjx7V70Dbt/PnZcv0O4ZESktL0djYiNjYWMwUzbaJfkNiiOgaMStVVAwt8wyMn3/+Odx6lRAQzVlHj6bmrET/EWJo3z6gvl6uLToQGhrq7WOn66REcTHU8Rq0WumWPlDozBFdo3jekN1uR2RkJKqqqlAm8gm0RvHmrFT+X2fi43njVrdb2dYcuucNNTT4mrMqKoYoeVobSAwRXSNm+gcOALW1cm3RgdDQUG/hN90GYiEkFfWabPfMuOfOnYvY2FjJ1iiKSfKGdLsGd+0CXC7emDUhQZ9jSIQxRpWnNYLEENE1Y8bw0I7bDezeLdsaXRChMnFT1xSXy3cDU3RGum3bNgC+80jogOJ5Q4sWLUJQUBDOnDmDCj0WaygeIjt9+jQuXryIkJAQzJ8/X7Y5hobEENE9iofKOoohzVezHDzIPWpDhvCq3goiROQtt9wi2RKFEWJo926ef6YY0dHRmOdpj6GLd0hxMSS8QqmpqYiIiJBsjbEhMUR0j+JiaMGCBQgJCcH58+dx6tQpbXfu8Zpg6VJe1Vsxrl69iiOedi2Uq6Aj06YBw4Z1XpmoGLq
FylpagD17+LbiYohCZIOHxBDRPeImt2ePkrPSiIgIpKWlAdAhVCbEkKJeE3HjmjVrFoYrWLslYLBYKG9ooBQWckE0ciQwZYq2+w4QxLhFE5LBQ2KI6B4xK21qUnZWqkvekNvtc88rKoYoX8iPKJ43JG7khw8fRlVVlXY77hgiU3A156VLl3D06FFYLBa6DjWAxBDRPRaL72b+2WdSTdELXcTQkSNAVRUQGQmkpGi33wBCiCHKF/IDHcWQXjWxJDJixAhMmzYNgK9mjiYovoBBjFlz5szBUAX7HvobEkNEz9x6K39WVAwtXrwYVqsV5eXlOH/+vDY7FSGyxYuBkBBt9hlA1NTU4MCBAwDIM+QX5s3jwrqmhgttBdE8VNbe7qvzpeh39DPPmHyrGKOJQUFiiOgZcaHt2AG0tUk1RQ9iYmK0X82ieL6Q6CU1depUjB49WrY56hMSAoiilpQ31DdKS3nV7thYX8NbxRBiiLyz2kBiiOiZmTN53tC1a7y9hIJoGipjTHkxJM4TeYX8iAiVKS6GiouL0djYOPgdimt56VLedFoxrly5grKyMsoX0hASQ0TPWK3K5w2JgVgTMXT8OHDlChAeDnhWqqkG5QtJoOM1qGeHd0lMmDAB8fHxaG9vxx6xHH4wKJ4vJK7B5ORkxMXFSbZGDUgMEb0jQmXC46EYHVezVFZWDm5n4hwtXAiEhQ3SssCjvr4eJZ5eTySG/MiiRfz7dPEicOyYbGs0p6OH47PBTroYU77YIoXItIfEENE7iucNjRgxAjNmzADgK2I2YIQYUnQQ3rlzJ1wuFyZOnIgEBXs9BSzh4VwQAcDWrXJt0Ynbb78dAPDpp58ObkdlZb7VnHa7BpYFHsIzRMnT2kFiiOgdkTfU2Ej1hnrCRPlCNCOVgEcsqCqGbrvtNgDAnj170NDQMPAdiWt44UIgNFQDywKLK1eu4PDhwwAob09LSAwRvWOCvCFNxJDDAZw/33n1j2JQsUWJeMSCqnlDkyZNwoQJE9De3j64ekNiQiKSzhVDjFGzZ8/GsGHDJFujDiSGiL6heL0hcXPft28fnE7nwHYiBuG0NO6iV4ympibs3bsXAHmGpCC+V1evAh7PgEpYLBavd2jAoTLGAPFZ4UlTDAqR6QOJIaJvKJ43NG7cOEyZMgVut9s72PQbxUNku3fvRltbG8aOHYvExETZ5piP0FBANOQcbF5NgCLyhrYONBR4+DBfzRkRoax3loot6gOJIaJvmCBvaPny5QCALVu2DGwHiidPd1xSb1Gw15MhEKEyxfOGiouLUVtb2/8diGv35puVzBe6evUqDh06BIBC1VpDYojoGybIGxqUGDpzhj+CgngbDgURoQuakUpEhH62bVOyT1l8fLzXQzug/D3hMfNcy6ohzsmsWbMwfPhwydaoBYkhou8oLoZuu+02WCwWHDlyBBcvXuzfh8UgnJoKDBmivXGSaWhowK5duwAA6enpkq0xMSkp/PtVU8NbTijIgJfYt7f7xiZFxRDlC+kHiSGi7yieNzRs2DDMnTsXwAAG4oIC/rxihbZGBQjbt29He3s7Jk2aRPlCMgkO9q2SUjRvSITK+p03VFwM1NUBNhvguY5Vg4ot6geJIaLvzJoFxMVR3tD1MOYTQ4p6TQo8fx95hQIAxfOGhNejtLS0fxXhxTV7221K9iOrrKzEwYMHAVC+kB6QGCL6jsnyhlhfa7kcOsRXsERGKruChcRQACHyhrZv56EhxRg1ahRmzpwJoJ+tORTPFxLnYubMmRg5cqRcYxSExBDRP8SsVHhCFOPmm29GSEgIzp49i/Ly8r59aPNm/rxsmZL9yC5fvuydkYoQBiGROXN4KKi+HvD0iVONfi+xb24GRKFGResLbfaMMzQh0QcSQ0T/EDkxO3YA167JtUUHoqKisNDj3elzqEzxEJnIn5o7dy5GjBgh2RoCQUE+D62iobJ+F1/cuZMLojFjgGnTdLRMDowx/P3vfwcA3HHHHZKtURPDiSGn04nc3Fys6Eei6rp165Cbm4vc3FysW7dOR+tMwE03AQkJQEsL8Pnnsq3RhX7lDbW2+uoLKZo8TSGyAER46BRNoha1rI4ePdq3lZ0dQ2QK1sAqLy/H6dOnERISQsnTOmEoMVRSUoKNGzfC6XSiurq6T58R4icrKwtZWVmw2+1YtWqVnmaqjcXiu+mL8JBiCDH06aefwt1bLZfdu7mHbORInmCuGIwxcs8HIkIM7djBBblixMXFYd68eQD6GCoTExdFQ2TCK7RkyRJERUVJtkZNDCWG7HY7srKy+rW0d82aNcjKyvL+nJ6ejtzcXD3MMw/CTeu5QFUjLS0NUVFRqKqqwoEDB3p+swiRLV/OE8wV4+TJk6ioqEBoaCiWLl0q2xxCMGsWMHw4F+KefnGq0ecl9nV1QGEh31Y0eVpMSPoTESH6h3qjdwccDgecTidsNtsNrxUomgDsF4Qr+uBBoL/FCQ1AaGiod+lqr6EyxfOFxHWyePFimpEGElar78avqIdWJFH3eg1u3w64XMDkycD48X6wzL+0t7d7c6coX0g/lBdDXWGz2XrsTN7S0oK6urpOD6IDw4fzSriAsgNxn/KGamt9s3JFxZD4+ylEFoAo7qFdtmwZQkJCcOrUKZw8ebL7NyoeItu7dy/q6uo6hQ4J7VFaDHVHXFxcjzlHa9asQWxsrPeRkJDgR+sMgknyhrZv347W7nIyPvuMz0inTFFyRupyubwz0uWKhh8MjbgG9+7l7TkUIzo6Gos9ff7+3pPgE2JI0e+o+NvT09MRpGAxyUAhWNaB8/PzsWHDhl7ft3r1atjtdk2P3Vvy9erVq/Hcc895f66rqyNBdD133AGsWcPFkNutXL5McnIyhg8fjsrKSuzdu7frfBnFW3Ds27cPNTU1iImJQWpqqmxziOtJSACmTwfKyvhqqgcflG2R5txxxx3Ytm0b/v73v+OZZ5658Q1XrvBwPeBLKlcMyhfyD9LEUEZGBjIyMnQ9RneJ1k6ns8ck7LCwMIQpWDxPUxYtAqKigMuX+WA0Z45sizTFarXitttuQ15eHrZs2dKzGFI0hCTyhW677TYEB0sbKoieuOMOLob+/nclxdCdd96JH/7wh/j000/R1taGkJCQzm8Qnuk5cwAFa2DV1tZiz549AEgM6Y1a0/nrSExMhM1m6zJ3iHIgBklYmK9xq6I5C2Lw2bRp040vnjsHHD3KPWKKdpCm+kIGoGPeUF/bxxiIefPmYdiwYaivr8fu3btvfMMnn/Dnu+/2r2F+YuvWrXC5XJg6dSomTJgg2xylMaQY6i7M5XA4biiquHr16k4rx/Lz8zsttScGgeIJnHfddRcAYM+ePTd+50SeQmoqMHSony3Tn6amJnzhaW9AYiiAueUWICQEOH0a6Gv7GANhtVq9k5Ib8obcbt/Y47lWVYOqTvsPQ4khIXbWr1+PkpIS5OTkID8/3/t6QUEB1q9f3+kz2dnZcDqdyM/PR35+PgoLC294DzFAhNv288+Bpia5tuhAQkICZs6cCbfbfWMpBuGeV1QobNu2Dc
3NzYiPj8dNN90k2xyiO6KigCVL+HZXHkwFEELgBjG0fz/PGYqO5mF7BaF8IT/CiF6pra1lAFhtba1sUwILt5ux+HjGAMY2bZJtjS48//zzDAB74oknfL90uRgbPpz/3Vu3SrNNT5599lkGgD311FOyTSF6Y80a/l285x7ZluhCRUUFA8AsFgurrKz0vfBf/8X/7pUrpdmmJw6HgwFgwcHBdO8ZBH29fxvKM0QEGBaLaUJln3zyCZjIySgqAiorgZgY36xcMT7++GMAwN2K5mIoxZe+xJ+3bOHNShUjPj4es2bN6tSsFIAvX0jREJnwCi1cuBAxMTGSrVEfEkPE4FBcDC1duhSRkZG4ePGirzWHRyhgxQqer6EY5eXlOHHiBIKDg6m+kBGYPRsYN46HqkXTYMX4kkfw/e1vf+O/qK0Fdu3i23feKckqfRFiiPKF/AOJIWJwdGzNceGCbGs0Jzw83Nsj6RMxExUDspiRK4bwCi1dupRmpEbAYvGtphJCXTGEGPrkk0/gcrl4XaX2dmDqVGDSJMnWaU9bWxslT/sZEkPE4Bg+nK+oApQdiEWo7OOPPwauXvU1hVTUPS9m3xQiMxBCmAuhrhiLFy9GTEwMKisrUVRUBHz0EX9B0Wtwx44dqKurw4gRIzB//nzZ5pgCEkPE4LnnHv784Ydy7dAJMSvdsWMHGt99l9dzmTsXGDtWrmE60NTU5O0STmLIQCxfDgQHAydOAD318TIoISEhXg/Jxx995BND994r0Sr9+Mjz9919992wKlbdP1Chs0wMHiGGNm8GWlrk2qIDiYmJmD59OlwuFyrffJP/UtEQmVhSP27cOMyaNUu2OURfiYkBbr6Zbyvqof3yl78MAHDk5wOXLvEl9cuWSbZKHz70TCzvEWMroTskhojBM28e95I0NiqbwHnPPfcgCEBcURH/haJi6K9//SsA7g2zWCySrSH6hfhOCq+JYohw9aSyMv6LO+4AQkMlWqQP5eXlOHbsGIKDgylfyI+QGCIGj8UCeGZtqobK7r33XiwGMKS1FSwuDliwQLZJmsMY84qhlStXSraG6DfiGty6Faivl2uLDowePRopKSnw+koU9ZqIENnSpUsRGxsr2RrzQGKI0IaOeUMK9khatGgRHvI07706fz7Pz1CM0tJSVFRUICIiArfffrtsc4j+Mm0aMHky0NqqbKmLR265Bd50YkW9sxQikwOJIUIbli/nzVtPneJdtBUjOCgIGR6X/KbwcMnW6MMHH3wAgC/ljYiIkGwN0W8sFkB49N5/X64tOpEZHQ0AKLRa0aRg2Yf6+nps86QaiBwpwj+QGCK0ISoK8NTjgeemqhRHj2J0fT1aALx89Khsa3RBhMjuVXSFjim47z7+/NFHvA6PYowvLQUA/LWrfoEK8Mknn6C1tRWTJ0+mnoB+hsQQoR1iVvree1LN0AWPwNtqsaDo2DGcVGz58oULF1BUVASLxULueSOzeDEQFwdUVwM7d8q2RlsaG2HxNKN9H8D7Cnq//u///g8AcP/999MCBj9DYojQjpUruat+zx7g/HnZ1miLZ+A95pmtiUFLFUSewoIFCzBq1CjJ1hADJjjYl0itmod20yaguRnXRo/GAXBPptvtlm2VZrS2tnqTp7/yla/INcaEkBgitGPMGGDhQr6t0qzt8mVg924AwNCvfQ2AemLoPY83j0JkCiBCZe+/r9ZiBs81F/bII4iNjcWVK1ewZ88eyUZpx2effYa6ujqMGjUKC8U4SvgNEkOEttx/P39WSSx88AG/qaSkIP3xxwEAu3btwgVFerE5nU5v/sUDDzwg2Rpi0Nx5J6+/c/IkcPiwbGu0oa3NW7Yj6MEHvdXR31MoJC/+lpUrV1LVaQnQGSe0RYihzz4DamqkmqIZ+fn8+cEHMXbsWCxatAiAOgPxX//6V7S1tWHGjBmYNm2abHOIwTJkiK+Tu/juGp3PPgOcTmDkSGDRItzvGWfeffddMAW8X26325sDRSEyOZAYIrRl8mRg1iy+kkWFSrhVVcCWLXz7wQcB+LwnqoTK3nnnHQDAg56/j1CAjAz+rIoYEtfaypVAUBC+9KUvITw8HCdPnkSpZ4WZkSksLMSFCxcwZMgQqvElCRJDhPYI75DnJmtoPvgAcLmA5GRg6lQA8M5Kt27diurqapnWDZr6+np88sknAEgMKcW99wIhITxMZvRSEG63b4Wq59qLjo72hsryFRB8eXl5AHhtoTBPcVfCv5AYIrRH3FQ//hioq5Nry2ARA62YaQNISkpCcnIyXC6X4Zf3/u1vf0NLSwsmT56M5ORk2eYQWjF0KJCezreNPinZsQO4eBGIjQU6eE0yMzMBcCFh5FAZYwwbN24EADz88MOSrTEvJIYI7UlOBm66iXewN/LyXqcT2LyZb3cQQ4BvIN6wYYOfjdKWjiEyqmuiGKqEysQ1dv/9vMq9h3vuuQdhYWE4fvw4Dh06JMm4wbN7925UVFQgOjra24yW8D8khgjtsViARx7h20YWCx9+yFexzJgBTJ/e6SUxgysoKMDVq1dlWDdoGhoavHVNKESmIJ78Guzfz1eWGZH2dp+YE2OKhyFDhnjFgwgzGRHhFVq5ciXCFW31YwRIDBH6INy9mzYZd1XZn//Mn6/zCgHAlClTkJKSApfL5fWuGI0PPvgA165dQ1JSElJTU2WbQ2jNsGG+sNJf/iLXloGybRtw5Urnv6UDRg+Vud1ur5B76KGHJFtjbkgMEfowfTowezb3rBhx1dXVq1zIAcBXv9rlWx7xzFT/YtAbzZ89Yu/RRx+lEJmqPPoof/7Tn4xZgFFcWw8+yBPCr+Pee+9FeHg4jh49iv379/vXNg3YuXMnzp8/j5iYGNwpyiEQUiAxROiH8A4ZMVSWl8dXkdntQDe1d8RMbvv27YYrwFhVVeVdRfbVbsQeoQAPPMDzbI4e5eEyI9HWBrz7Lt/uJrE4JibGWzX9j3/8o78s0wwxkfrKV75Cq8gkQ2KI0A8xgG3ZAly6JNeW/vKnP/Hnxx7r9i3jx4/HkiVLOq0GMQr5+flob2/H3LlzMf26fChCIWJi+DJ7wPedNgqbNvGGs6NGAbfc0u3b/uEf/gEA93S6XC5/WTdoWltbvWKIJiTyITFE6MfkybxXmcsFvP22bGv6zqlTvON3x0TwbhChsrfeessflmlGxxAZoThC0P/5z/xaNAp/+AN//upXeSJ4N9x1112Ii4vDxYsXsXXrVj8ZN3g+/vhjVFVVYcyYMUgXZRAIaZAYIvTl61/nz2JgMwJCuN1+OzB2bI9vfeSRRxASEoKSkhIcPHjQD8YNnoqKCmzfvh2AT8wRCnP33YDNBly4AHj+7wFPTY2vLMc//mOPbw0NDfWGrI0UKhMTqEcffRTBwcGSrSFIDBH68vDDvGnkgQOAEcrmMwYIL08PITLB8OHDvTkLfzCI4PvDH/4AxhhuvfVWJCQkyDaH0JuwMN+KyDfflGtLX9mwAWht5TXL5s7t9e2Pea7Vd955B9euXdPZuMFTXV2Nv/71rwCAr4sJIyEVEkOEvsTFAffdx
7eNIBZ27gSOHQMiI32VtHvhHz0z1z/+8Y9ob2/X07pB43a78cYbbwAAnnzyScnWEH7j8cf588aNxqgKL8aKXrxCgsWLF2PixIloaGgwRKmLjRs3orW1FXPmzKHK7wECiSFCf8TM509/4kXUApnf/pY/P/QQTz7tA3fffTdGjBiBy5cvY5NYjh+gbN++HQ6HAzExMd6Gs4QJWLyYV4W/di3wV3ceOwbs3s3zhPrgnQUAq9WKb3zjGwCA3/zmN3papwnCi0xeocCBxBChP3fdBYwcyYunBXIn+7o6343im9/s88dCQkK8K1qE1yVQ+d3vfgeAr16JjIyUbA3hNywWQHgCheAPVH7/e/585518JVkfeeKJJ2C1WrF9+3YcP35cH9s04ODBg9i9ezeCg4NpAUMAQWKI0J+QEJ+bfv16qab0yIYNfOZ80018Jt0PHvf8fR988AEuBWgZgdraWm+HbzGLJkzE178OBAcDe/YAgdrLq7XVJ9aeeqpfH42Pj/d2sv9tAAu+9Z4xcOXKlRg9erRkawgBiSHCP2Rl8edPPuFL1wMRMYA++SSfSfeD5ORkLFq0CG1tbQE7EL/99ttoamrCzJkzMX/+fNnmEP5m1ChfzaEA/Y7i3Xd59fexY4F77un3x7/p8ej+/ve/R2trq9bWDZrGxkbvKrJVq1ZJtoboCIkhwj8kJQF33MFXa73+umxrbmT/fj5jDg725Tj1k29961sAgNzc3IAr/sYYw69+9SsAwFNPPUXtN8yKCJX94Q/cCxpo/PrX/Pmpp/i12E++/OUvY/To0bhy5Yp3tVYgsWHDBtTV1SExMRHLly+XbQ7RARJDhP94+mn+/Nvfcnd4IPHKK/w5I6NfeQodyczMxLBhw3D27Fn87W9/09C4wfPZZ5/h8OHDiIqK8ob0CBNy113ApEm8jk+gVaQuK+ONWa3WfuXsdSQkJARPPPEEAOAVcU0HECJElpWVBauVbr+BBP03CP9xzz3c/X3liq/nUCBQWem7MTz77IB3Ex4e7s3FefXVV7WwTDPEjeHrX/86YmNjJVtDSCMoCPjOd/j2K68EVvNW4RW6914gPn7Au3nmmWcQFBSEbdu2BVTz1qKiIuzdu7eTYCMCBxJDhP8ICfElRb74YuAMxK+/DrS0AKmpvH3IIBB5AJs2bQqYFS1nz57F+++/DwD4jrgREublG9/gdbQOHuSemEDA6QQ8Kx3hCTcPlPj4eGR4iky+/PLLgzRMO/7nf/4HAK/6PnLkSMnWENdDYojwL888wyviFhYCO3bItoZ3xhZenGef7Xfi9PUkJSXhnnvuAWMML774ogYGDp7XXnsNbrcbt99+O2bMmCHbHEI2NpsvLy5QQkm5uUBDAzBrFs8tHCTf+973APBFA1evXh30/gbL2bNnkZeXBwB47rnnJFtDdAWJIcK/jBzpqyr7i1/ItQUA8vOBc+e4XZ7+RoMlOzsbAF/RcvnyZU32OVBqa2vx2muvAQCeHUQIkFCM736XP7/3HnDypFRT0NoKvPQS337++UFPSABg4cKFmD9/PlpaWvBrEX6TyMsvvwyXy4Xbb78dc/vQXoTwP4YTQ06nE7m5uVixYkWf3l9QUIDMzEzk5uaioKAAOTk53lorhCSee44PeB98wKvNysLtBv77v/n2t7/NPVYasHTpUixcuBAtLS3Skzhfe+011NbWYsaMGd4eagSBGTN4A1e3G1i7Vq4tf/4zbyI7diygURFCi8WCf/qnfwLAhUhjY6Mm+x0IdXV1eN2zgvb555+XZgfRM4YSQyUlJdi4cSOcTieqq6v79Bmn04mCggKsWrUKq1atQlJSkjeeTEjippt8/cpkeof++ldefC4mZlCJ09djsVi83qFf/epXaGho0Gzf/eHatWveUN3q1atp9QrRmX/9V/78hz8AFRVybHC7fWPAs8/yps4a8dBDDyEpKQmVlZVSvUOvvvoq6urqMH36dNx1113S7CB6gRmQvLw8Zrfb+/zempqaQR2vtraWAWC1tbWD2g/RgR07GAMYCw5mrLzc/8d3uxlLTeU2/OAHmu++vb2dTZ06lQFga9eu1Xz/feGVV15hANjEiRNZW1ubFBuIAOe22/g18N3vyjn+xo38+EOGMDbIcborfvvb3zIAbNSoUezatWua7783amtrWVxcHAPA3nzzTb8fn+j7/ZumioQclizhiZLt7cB//If/j//3vwNFRXxVjcedriVBQUH44Q9/CABYu3YtamtrNT9GTzQ3N2PdunUAgJycHAQPoIAdYQI831G8/jrg7zYyLhfwox/x7eee44ndGvO1r30NEyZMwOXLl72hKn/y8ssvo7q6GjfddBP1IQtwTCGGNm7ciPz8fOTm5iInJ6fX97e0tKCurq7Tg9CB//xP/vzWW8DRo/47rtvtuwmsWgWMGKHLYR577DFMnz4d1dXV3mW1/uKXv/wlKioqEB8fT0UWie65/XZeTqK5GfjpT/177D/9iV/3cXFcDOlASEgIVq9eDYBPSvyZO+R0Or3X/b//+78jKCjIb8cmBoCfPFWa0p8wWXl5OSvvEIZZv349y8jI6PEzP/7xjxmAGx4UJtOBlSu5m/yhh/x3zLfe8rnmr1zR9VDvvPMOA8CioqLY5cuXdT2WoKqqitlsNgaAvfHGG345JmFgtm7l10NQEGNlZf45ZmsrY5Mm8ePqHEZubm5mEydOZADYv//7v+t6rI788Ic/ZADYzJkzWXt7u9+OS3Smr2EyC2NyKt/l5+djw4YNvb5v9erVsNvtN3x2zZo1KC4u7vdxnU4nhg4dipqaGti6ccu2tLSgpaXF+3NdXR0SEhJQW1uLmJiYfh+T6IEDB4A5c/j2F1/0u1t8v2lq4gncFRXAmjXACy/oejjGGNLS0lBUVITvfOc7flld9s///M/4n//5H8yePRv79u2jGSnROytX8tWd990HeAp06sr/+3/cGzRqFFBeDkRF6Xq4jRs34uGHH0ZkZCSOHz+OcePG6Xq8U6dOYfr06WhpacE777yDBx54QNfjEd1TV1eH2NjY3u/f/lBmWtPfBOrrAcCKi4v7fDxKoNaZJ5/kM8S5cxnTO9H3v/+bH2v8eMb8lFC5ZcsWBoBZrdZ+fe8GwsmTJ1loaCgDwD7++GNdj0UoRFkZ9wwB3FOkJ+fPc68swNjrr+t7LA9ut5stXryYAWBf//rXdT/eV77yFQaA3X777cztdut+PKJ7+nr/VloM1dTUMACdwmTid/1ZYUZiSGeuXGFs6FA+OL7yin7HKS9nLDKSH+ePf9TvOF3w8MMPMwAsLS1NN5e52+1my5cvZwDYihUraBAm+sczz/BrY/p0xpqa9DvOo4/y4yxYwJjLpd9xrmPPnj3elIedO3fqdpxNmzYxACwoKIgdOnRIt+MQfUPp1WTd1RhyOBzeFTQAYLPZkJ2djcTERO/vcnNzkZGR0W2IjJDAiBG+4of/+q/AxYvaH4Mx3gn72jXg1luBr35V+2P0wIsvvoghQ4Zg7969uq1q+f3vf48tW7YgPDwcr732GiwaVPIlTMRPf8rDVmVl
+iVTb90KvP02L7r66qu8Q72fSEtL8y4mePzxx9HU1KT5MRobG739/7773e9i5syZmh+D0Ak/iTNNKC8vZ2vXrmV2u50BYNnZ2Z3CYOvXr2eJiYmdPlNTU8PWrl3rfWRnZ/f7uOQZ8gPt7b66P3fcof2M8de/5vuOiGDs5Elt991HXnrpJQaAxcTEdPJWasHFixfZ0KFDGQC2bt06TfdNmIh33/UlU2sd0q2uZiwhge//W9/Sdt99NqGajRkzhgFgzz33nOb7f/rppxkANm7cuEHXtyO0Qekwmb8hMeQnDh/mYgVg7H/+R7v9Hjvmy1H4f/9Pu/32k/b2drZkyRIGgKWmprKWlhZN9tvW1uYNj9ntdiqwSAyOzEx+rcycyVh9vTb7dLsZe/BBvt/Jkxmrq9NmvwPgww8/ZACYxWJh27Zt02y/H3zwgTcMV1BQoNl+icFBYkhDSAz5EeHBCQlhrLBw8PurrWVs2jS+z5tv5h4oiZw9e9ZbkVarmWlOTg4DwCIjI9nBgwc12SdhYi5fZmzUKH7NZGRwITNY1q/X9roeJE888QQDwEaOHMnOnDkz6P2dPn2ajRgxggFgzz//vAYWElpBYkhDSAz5Ebebsfvv5wPnqFGDa9XhcjF23318X+PGMXbxonZ2DoL333/fO4N8fZCrafLy8rz7+stf/qKRhYTp2bGDCxeAsf/6r8Ht6+OPedsdgLGf/1wb+wZJQ0MDmzNnDgPA5syZwxoaGga8r6qqKjZ9+nQGgM2dO5c1NzdraCkxWEgMaQiJIT/jdDI2Zw4fPJOSGLt0qf/7aG/3LdkPC2Ns717NzRwMq1ev9rrqN2zYMKB9fPDBBywkJIQBYN///vc1tpAwPcKbAzD26qsD28euXb4VnI8+6tfVY71x+vRpNnLkSAaA3X333QPqXXbt2jV28803MwAsPj6eVVRU6GApMRhIDGkIiSEJXLjgq1A7bRrP++krra2MPfww/6zVytif/6yfnQPE7XazrKwsBoCFhISwP/Zzqf+7777rFUKZmZmUJ0Tow3PP+QTRL37Rv89u2sSYzcY/e+edjGmUI6clX3zxBQsPD2cA2NKlS/uV9Hzp0iW2aNEiBoDFxsZSiDpAITGkISSGJHH8OA9vAYzFxjL24Ye9f+boUcYWLfLlJ3RRdDNQaG9vZ48++qg3zPXtb3+7Vxd7U1MTe+6557yfefjhh0kIEfrhdjO2erVPEH3ta3xVWE+4XIytW8cnIgBjS5Zol4itA59//jmLjY1lANj06dPZ7t27e/3M3r172YQJExgAZrPZ2Pbt2/1gKTEQSAxpCIkhiVy8yNjixb7B+M47Gdu9+8akzvJyxn74Qx4SAxiLjmbso4/k2NwP2tvb2Y9+9COvuJk0aRL79a9/fYMoamxsZL/73e/YjBkzvO/91re+RUKI8A9r1vjEzZgxjP3yl4xVVnZ+T1sbX5o/a5bvev3GNxgzQA5NaWkpGz16tDd0/fTTT7PS0tJOhUvdbjc7cuQIe+yxx7zX4OTJk9nRo0clWk70RsD3JjMSfe5tQuhDSwuQkwP86ldAezv/3bBhQGoq70B/6RJw8KDv/XfcAbz+OjB+vBx7B8Df/vY3PPHEE7hy5QoAICwsDLNnz8a4ceNw8eJFlJWVob6+HgAwcuRI/Pa3v8U999wj02TCbOzaBTzxBHDsGP85JASYO5cXTW1oAIqKeFFTAIiN5b3/nn6aF1g0AJWVlXj++efx5ptven83ZcoUjB8/HmFhYSgtLcX58+e9rz322GN46aWXMGzYMBnmEn2kr/dvEkN9gMRQgFBeDvzkJ8DGjVwgdcRiAdLTeZXpzEzDDMAduXbtGl5//XX84he/wLlz5254fdKkScjKysJTTz1FAzAhh6YmIDcX+P3vgf37b3x96FDg29/mTViHDvW3dZqwdetWvPTSS/j444/R2tra6bXQ0FCkp6fjpz/96Q0NxInAhMSQhpAYCjBaW/lAvH8/EBHBB127HRg7VrZlmuB2u+FwOFBaWorLly9j7NixGD9+PObOnQurH9sXEESPlJUBJ04AlZVAUBCQlgbcdJNfW2zoSV1dHbZv347a2lpcu3YNkyZNwpIlSxARESHbNKIfkBjSEBJDBEEQBGE8+nr/VkPCEwRBEARBDBASQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmJpg2QYYAcYYAN79liAIgiAIYyDu2+I+3h0khvpAfX09ACAhIUGyJQRBEARB9Jf6+nrExsZ2+7qF9SaXCLjdbly4cAFDhgyBxWLRbL91dXVISEhARUUFYmJiNNuvKtD56Rk6Pz1D56d76Nz0DJ2fnjHS+WGMob6+HmPHjoXV2n1mEHmG+oDVakV8fLxu+4+JiQn4L5RM6Pz0DJ2fnqHz0z10bnqGzk/PGOX89OQRElACNUEQBEEQpobEEEEQBEEQpobEkETCwsLw4x//GGFhYbJNCUjo/PQMnZ+eofPTPXRueobOT8+oeH4ogZogCIIgCFNDniGCIAiCIEwNiSGCIAiCIEwNiSGCIAiCIEwNiSGCIAiCIEwNFV2UxLp162Cz2QAATqcT2dnZcg0KMNatWwcAKC8vBwCsX79epjkBzYoVK7B582bZZgQcOTk5SEpKAgDExcUhIyNDskWBQW5uLpxOJ2w2G8rLy7F69WrvWGQ2nE4nNm7ciLy8vC6vIbOP0305P4Aa4zSJIQmIL1BWVhYAoKCgAKtWrTL0F0lLcnJysHbtWu/Pq1atoht+N+Tn56OgoEC2GQGF0+nE8uXLsWXLFthsNpSUlCAlJaXXRo1mYN26dcjKyup0g3/qqaeQl5cn1zAJlJSUoKioCE6nE9XV1Te8bvZxurfzo9w4zQi/Y7PZWE1NTaff0b+CU1NTw9LT0zudn+LiYgaAlZeXyzMsAKmpqWHr16+n7851ZGVlsbVr13b63ebNmyVZE1ikp6f36XdmIi8vj9nt9ht+T+M0p6vzo+I4TTlDfsbhcHhd1NdDM3xOUVERHA6H9+fExEQAfBZL+Ni4cSMeeugh2WYEHLm5ucjIyIDD4fBeU+np6ZKtCgxsNhtWrFjhvZYcDof3+iJ80DjdO6qN0xQm8zMdvzwdsdlshv0SaYnNZkNNTU2n34nBhwZtHwUFBXSD7wJxfZWUlCAxMRGJiYlYtWoVMjMz6XwBeP3115GSkoKhQ4ciOzsbSUlJpgn79Acap3tGxXGaPEMBQlxcXJdxWQJYs2YN1q9fb9okz65wOp2GHXT0RNzEbDYb7HY7EhMTsXbtWmRmZkq2LDCw2WzIyclBRkYG1q1bh7y8PLq59wMap7vH6OM0iaEAgS6wrsnJycHDDz/sTWIkfGEgontSU1O922I2T+ENfj0lJiYiLy8P5eXlqK6uRkpKimyzDAON012jwjhNYsjPdDebp5n+jeTn5yMpKcl0y1l7oqSkpNO
NnuhMd9eQzWbrNvRhFkQejAgXJiYmori4GDabDfn5+ZKtCyxonO47qozTlDPkZxITE70D8/UXFeU0+BCzeDHTEMs7zT4QVVdXo6SkxHt+RH2PdevWITEx0fQeI5En5HA4YLfbvb93Op2mF5EOh6PLEMaqVav8b0yAQ+N031BpnCbPkARWr17dyWWfn59vaPei1pSUlKCkpAR2ux0OhwMOhwO5ubmIi4uTbZp00tPTkZ2d7X2IG1l2drbphZBg7dq12LBhg/fn/Px8pKendxJHZiQ9PR0lJSU35AgVFxeb+rvTXeiLxmlOd+dHtXHawhhVIpOBmMkDQGFhYafiVWbG6XRi0qRJXSZ10le1M/n5+diwYQPy8/ORnZ2NFStW0KzVg6iyDABVVVV0fXlwOp1Ys2YNhg0b5s2l6liE0Uw4HA7vNVRSUoLs7GzMnz+/kzA08zjd0/lRcZwmMUQQBEEQhKmhMBlBEARBEKaGxBBBEARBEKaGxBBBEARBEKaGxBBBEARBEKaGxBBBEARBEKaGxBBBEARBEKaGxBBBEARBEKaGxBBBEARBEKaGxBBBEARBEKaGxBBBEKaloKAASUlJss0gCEIyJIYIgiAIgjA1wbINIAiCkEFmZiby8/MBABaLBQBQU1NjyqalBGF2yDNEEIQpycvLQ15eHhITE8EYA2OMhBBBmBQSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBEARBmBoSQwRBmJbExEQ4HA44nU4UFBTA4XDINokgCAmQGCIIwrTY7XbY7XZMmjQJa9eulW0OQRCSsDDGmGwjCIIgCIIgZEGeIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTA2JIYIgCIIgTM3/B29134uYvP2MAAAAAElFTkSuQmCC\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAk0AAAG4CAYAAABYTdNvAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/TGe4hAAAACXBIWXMAAA9hAAAPYQGoP6dpAACVwElEQVR4nO3deVzU1foH8M/MsO+yKCCg4oaKbAq45G6mqdlitmhp2W3P23KvZcuvMqtbdu/Nbre9tO2WZqtmmea+ISq4K4qCKCgIssMAM+f3x5czgAIO8J05M2ee9+s1L8Zx5jsPw8x3nnPOc87RMMYYCCGEEEJIq7SiAyCEEEIIsQeUNBFCCCGEmIGSJkIIIYQQM1DSRAghhBBiBkqaCCGEEELMQEkTIYQQQogZKGkihBBCCDGDk+gAZGI0GpGbmwtvb29oNBrR4RBCCCHEDIwxlJWVITQ0FFpty/1JlDSpKDc3F+Hh4aLDIIQQQkg75OTkICwsrMX/p6RJRd7e3gCUF93Hx0dwNIQQQggxR2lpKcLDw03f4y2hpElFfEjOx8eHkiZCCCHEzlyttIYKwQkhhBBCzEBJEyGEEEKIGShpIoQQQggxAyVNhBBCCCFmoEJwQgghRIDa2loYDAbRYUjN2dkZOp1OteNR0kQIIYRYUWlpKS5evAi9Xi86FOlpNBr4+voiODhYlUWnKWkihBBCrKS0tBTnzp2Dl5cXAgMD4ezsTDtIWAhjDBUVFSgoKIC7uzv8/Pw6fExKmgghhBAruXjxIry8vBAWFkbJkhW4u7tDr9cjPz8fvr6+HX7NqRCcEEIIsYLa2lro9XpVvryJ+Xx8fGAwGFSpH6OkiRBCCLEC/qXt7OwsOBLH4uSkDKrV1dV1+FiUNBFCCCFWRL1M1qXm601JEyGEEEKIGShpcnBVVVUoLy9XpduSEItgDLhwAZB4enZVVRUuXrwIxpjoUAi5EmOA0QgYDMp1B0ZJkwPKysrCs88+i+TkZHh5ecHb2xvOzs7o1q0bnnzySezevVt0iMTRFRUBr78ODBoEeHkBwcGAhwfQowdw993Anj2iI+yQuro6/PTTT7j++usRFBQEDw8PBAUFoXPnzhg/fjw+++wz1NTUiA6TODLGgEuXgNOngf37gX37gLQ05WdGhtKQ6UBj++GHH8a1115r1n1vv/12zJgxo93PpSYNo6aNakpLS+Hr64uSkhL4+PiIDucKxcXFeO2117BkyZKrnpBnzJiBt99+GyEhIVaKjhAANTXAiy8CS5YAVVWt33fUKODjj4Heva0Tm0rWrVuHv/zlL8jOzm71fuHh4Vi4cCFmz55NNTCSqK6uxunTp9GjRw+4ubmJDqdl5eXAmTNAZWXr93NyAsLCgIAAoA3v0dOnT6Nv375Yu3YtxowZAwDIzc3FRx99hBtvvBFxcXFN7p+WlobBgwdj3759iI2NbetvY9brbu73N/U0OYgjR44gPj4eixcvRk1NDcaMGYNly5bhzJkzqKioQH5+Pn766Sfceeed0Ol0WLFiBaKiovD999+LDp04isxMYPhw4B//UBKm+Hhg6VLg2DFlaC43F1i/Hpg1SzlZb94MJCQAX30lOnKzVFdX4/HHH8eECROQnZ2NwMBAPP3009izZw8KCwtRWVmJPXv24B//+AeCg4ORk5ODe+65B3PmzEHl1b68CFEDY8C5c8pnrrIS0OmALl2Avn2Vz2N8PDBggJIoubkpPU1ZWUrPUxt6nZYsWYIePXqYEiZASZpefvllpKenX3H/+Ph4DB48GP/85z9V+CU7iBHVlJSUMACspKREdChNrF27lvn4+DAArEePHmz16tXMaDS2eP+0tDSWlJTEADCNRsM++OADK0ZLHNLu3Yz5+jIGMObvz9gPPzDWynuUZWczNmqUcn+AsQULWr+/YJWVlezaa69lABgA9sgjj7CKiooW719VVcVeffVVptVqGQAWExPD8vPzrRgxsYSqqip25MgRVlVVJTqUKxkMjJ06xVhqqnI5dYqxmprW75+Xx9jevcr9Dx1irLr6qk9TU1PDAgMD2fPPP9/k9tTUVAaALV26tNnHvfXWW8zT05OVlZW15bdijJn3upv7/U1Jk4psMWlat24dc3JyYgDYiBEjWEFBgVmPq6urYw888IDpJP/GG29YOFLisPbtY8zPT0l+hgxh7MwZ8x5XV8fYiy82JE4vvmjJKNutoqKCjR8/ngFgnp6ebPXq1WY/duPGjaxLly4MAIuPj2eXLl2yXKDE4mw2aTIaGcvIaEiYzPyeYIwxVlHBWHo6q9y6lfXt3p317dOHVVZWmv67sLCQBQcHs6FDh7K6ujq2YcMGBoBt2rTJdJ+NGzeavmsaXxonUPv372cA2A8//NDmX4+SJhtla0lTeno68/b2ZgDY9OnTWbUZrYDGjEYje/75501v4OXLl1soUuKwjh5lLCBASXqGDWOsHa1I9u9/NyROb76peogdYTAY2OTJkxkA5uXlxbZu3drmYxw/fpx17tyZAWDDhg1j5eXlFoiUWENLX
95Go5GVl5eLuZSVsfIjR1j5li2sfNs2Vn7uXKsjEc3S6xk7eJDtWrqU6XQ69sRf/2r6r9tvv525u7uz48ePM8YYW7RoEdNoNE2+J8+fP88WLlzIALD777+fffnll+zLL79kmZmZpvvU1tYyd3d39tRTT6n2ujdGSZMAtpQ0nTt3joWGhjIAbNSoUW1OmBp78sknGQDm5ubGUlNTVYySOLTycsb691eSncRExoqL23+sN95QjqPRMLZ+vXoxdtCLL75o+uxs27at3cdJT09nfn5+DAC7++67VYyQWFNLX97l5eXN9rSIurQrMa+uZiw9nS2YM4dptVq2ZfNm9t133zEA7O233zbdbdasWSwgIOCKh19teI4xxvr06cMmTZrU5tDUTJqoEFxCRqMRs2fPRm5uLvr3748ff/wRrq6u7T7em2++iUmTJqG6uho33ngjCgsLVYyWOCTGgIceAo4cAUJCgFWrAF/f9h9v/nxg7lzluHfeqRSzCvbrr7/i5ZdfBgB89NFHGD58eLuPFRsbi59++glarRZffPEFPv/8c7XCJEQdrq5Ar1546cEHMSAyErPvugsPP/wwRo0ahXnz5pnuVlhYiE6dOrXrKTp16oSLFy+qFXG7OAl9dmIR77zzDtavXw93d3f88MMP7X6DcjqdDt988w2Sk5Nx/PhxzJs3D19//bVK0RKH9NlnwJdfKrNzvv1WmaHTUf/5j7J+0/79wB13ABs3KscXIC8vD3fddRcA4JFHHjFd74hRo0Zh4cKFeP755/Hwww8jMTER/fv37/BxiXgeHh4oLy+37pMaDMDRo8oyH506Ad27m5YN8PDwaN8xPT3h0qsXPnvhBSTOng03NzcsXbr0iiUzWDtXOmKMCV9+g3qaJHPw4EE888wzAIB//etf6Nu3ryrH9fX1xRdffAGtVov//e9/+OGHH1Q5LnFAubnAk08q1xctAkaOVOe47u7AypWAtzewdSvw/vvqHLcd5s2bh0uXLiEhIQH/+te/VDvuM888g/Hjx6OyshKzZ89WZdd2Ip5Go4Gnp6d1L5cuwVOng6evLzyjouDp5WX6vw4lJv7+WLt/PwBlmY0Tx483+e+AgABcunSpXYe+dOkSAgMD2x+bCqRImsrLy/Hiiy9i4sSJ8Pf3h0ajwbJly8x67LJly6DRaJq9nD9/3rKBq8xoNOIvf/kL9Ho9pkyZggceeEDV4yclJZkSsgcffBAFBQWqHp84iL/+FSgtBZKTgb//Xd1j9+qlrPMEAM8+K2SY7ueff8bKlSuh0+nw2WefwcXFRbVj63Q6fPHFF/Dx8cGePXvw4YcfqnZs4kBKSwF+/u7eXVn3TCUHDh7EwnffxT3TpiG+b1/cN3cuSkpKTP8fFRWFS5cuNbkNuPqmunV1dcjJyUG/fv1Ui7U9pEiaLl68iIULF+Lo0aPtWi0UABYuXIgvv/yyycXPz0/dQC3siy++QEpKCry8vPDRRx9ZpBvz//7v/xAdHY2CggK88MILqh+fSG71aqU3SKcDPvzQMsNnDz4IDBkClJUBjWoprKG0tBSPPPIIAGD+/PntPh+1JiQkBK+99hoAYMGCBXbXuCOCMQbk5CjXg4IAFXevqK2txZw5cxAaGool//kPlr34Ii4UFOCJRp/DoUOHgjGGvXv3Nnmsp6cnAGXniuYcOXIE1dXVGDZsmGrxtkuby9BtUHV1NcvLy2OMmVeB39jSpUsZAFVmhYmcPVdcXGyalrx48WKLPteWLVsYAKbVatmBAwcs+lxEIlVVjHXrpsxy+/vfLftcBw4w5uSkPNdvv1n2uRp55plnGADWq1evJmvVqK2uro4NHjyYAWB33nmnxZ6HqMsm1mm6cEFZiyktjbHaWlUP/X//939Mo9GwDRs2KDdkZLBFDz3EALBff/2VMcaYXq9nAQEBbMGCBU0eW1NTw/z8/Fjfvn3ZJ598wr755ht26tQp0/+/9dZbzMPDg5WWlrY5LlpyoBUdSZpKS0tZXV1du59bZNLElwXo27cv0+v1Fn++6dOnMwBs/PjxbV/Tgzimf/5TSWK6dlWWG7C0p55Sni8mRlm92MJycnKYm5sbA8B++eUXiz/fnj17mEajUa3RRyxPeNJUW6skS6mpSvKkor179zInJyf22GOPNdxYVcXqUlJYYv/+LDQkxLQ467x581ivXr2uOMbPP//M+vfvb1qQufH3eHJyMps1a1a7YqOkqRXtTZq8vLwYAObi4sKmTp3KMjIyrvrY6upqVlJSYrrk5OQISZqys7OZs7MzA8B+s1KrOjMzk7m4uDAAbNWqVVZ5TmLHiouV7VEAxj75xDrPWVjYsDXLl19a/Onuvfde08r71mpIzJo1iwFgEyZMsMrzkY4RnjSdOdOw5Ym1Grv8OQ8eND1nZmYmc3Z2ZuvNXFMtLS2NaTQalpaW1q4QaJ0mFXl4eGDOnDn473//ix9//BHz58/Hn3/+iWHDhiGHj/u24PXXX4evr6/pEh4ebqWom3r11VdRW1uLsWPHYuLEiVZ5zsjISDz++OMAgOeeew5Go9Eqz0vs1OLFQFEREBUFzJ5tnef09weeflq5/sILyqa/FnLo0CHT5JM333zTatOiX375ZTg5OeGPP/7Apk2brPKcxE7V1AD5+cr1sDDT8gIWFxKi1C5WVyvnACjfH3PnzsU/+KSNq/jHP/6B6dOnIy4uzoKBmqldaZsNa2tPU3O2bt3KNBoNe+CBB1q9ny30NJ06dcrUldmeLRo6orCw0NRD99NPP1n1uYkdOX+eMQ8PpcenHftGdUhFBWMhIcpzv/OOxZ7mlltuMW1XZG0PP/wwA8CGDBlCQ+U2TmhPE+/xOXrU+ptb5+Ze0dtkTdTTZGHXXHMNkpOTsX79+lbv5+rqCh8fnyYXa1u0aBHq6upw7bXX4pprrrHqc/v7++Oxxx4DALzyyivtXrCMSG7JEqCyEkhKAm680brP7eGh9DIBSm9XTY3qT3H06FHTumV8BXBrev755+Hu7o5du3Zh7dq1Vn9+Ygca9zKFhFivl4nr3PmK3iZ7RUlTC8LDw1Fk43/c06dPm7ZTEHGyBoAnn3wSnp6e2Lt3L3777TchMRAbVlIC/Pe/yvVnn7X+yRoA7rkHCA5Wpll/843qh3/jjTfAGMONN94oZIXukJAQPPjggwCAxYsXW/35iR24cEFZasDTU9UlBsym0ymfQQDIy1NisVOUNLXg1KlTCAoKEh1Gq5YsWQKDwYDx48dj6NChQmIIDAzEQw89BEDpbSKkiQ8/VBbS69cPmDpVTAxubsATTyjX33gDULH+LisrC1999RUAZc0kUZ544gk4OTlhw4YN2LNnj7A4iA2qq2tYyDI0VEzDBWja29TCWkz2wKGSpry8PBw7dgy1tbWm25pb1XrNmjXYu3ev1Yqq26O4uBiffvopAOBvf/ub0Fj+9re/wdXVFbt27cKuXbuExkJsSHU18O9/K9fnzwe0Ak83Dz6obAh89KiyObBK3nrrLRgMBowbNw5JSUmqHbetwsPDceeddwJQCtEJMSkoUBoK7u5iepk4nU5Z
TBNQer7slDRJ07vvvotFixbhs88+AwCsWrUKixYtwqJFi0zLtS9YsAD9+vXDuUZbKwwbNgwzZszAm2++iQ8//BAPPPAApk2bhvDwcDz77LNCfhdzfPzxxygvL0d0dDQmTJggNJYuXbqYTthvv/220FiIDfnqK+D8eWWmTv37QxgfH+Dhh5Xrb7yhyiEvXbqEpUuXAhDby8TxxtP333+PkydPCo6G2ASjsaGWqUsXcb1MXOfOSgzl5UBFhdhY2kvdGnVxunXrxgA0ezl9+jRjjLHZs2c3+TdjjD333HMsLi6O+fr6MmdnZxYREcEeeughdv78+TbHYK3FLWtqalhYWBgDwD777DOLPpe50tPTGQCm0+lYTk6O6HCIaEajsqgkwJiFV6g3W14eY87OSkx793b4cP/6178YABYdHW0zs9auv/56BqDpAoPEZlh99tzFi8qstfR0qyzwapbMTCWmzEyrPSXNnmtGVlYWmLJY5xWX7t27A1A25238b0CZfZaWlobi4mLU1NQgOzsb7733Hrp06SLmFzHDypUrcfbs2SY9PKLFxsZi9OjRMBgM+C8v/CWOa8cO4MABpZ7o3ntFR6MIDgamT1eud/A9ajQaTe/zxx57zGrrMl3NX//6VwDA559/jgp7bckTdTDWMAwWFCR2eLwx/t166ZJFZrNamo28iqQtPvjgAwDAQw89BFdXV8HRNOCLXX700UeorKwUGwwRiycld96pLDJpK+o308X//tehqc+///47MjMz4evri5kzZ6oUXMeNHz8ePXv2RGlpKf73v/+JDoeIVFGhLPWh0TTUEtkCT0/Ay0tJ6pqpKbZ1lDTZmaNHj2LLli3QarWYO3eu6HCamDJlCnr06IGioiKsXLlSdDhElAsXAP7350mKrRg2DIiNVYrU6+uR2uPdd98FANx7772m3dltgVarNc1mfe+992jtNEd28aLy098fcHYWG8vlOndWfl68aHfLD1DSZGc++ugjAEqCEhYWJjiapnQ6nSmR++STTwRHQ4T5+GOgthYYOhRISBAdTVMaTUMi99577Vp+4NSpU/jtt9+g0WjwMC8utyFz5syBm5sb0tPTkZKSIjocIoLB0NCTaku9TJyfH+DkpJwn6idqlZeXo3Pnzvj666+v+vDCwkJ4enpizZo1Fg70SpQ02ZHq6mrTYpYPPPCA4Giad88990Cr1WLr1q04duyY6HCItRmNAE+YbTChAKAMGfr6AqdOARs3tvnhfI+58ePHo1evXioH13EBAQG47bbbACi9TcQBFRYqn0U3N2U4zIa89NJL6B4ZCQQEKDfUD9EtWbIE3t7euP322033XbNmDV566aUrjhEQEID77rsPL/DV/q2IkiY7snLlSly6dAkRERG47rrrRIfTrNDQUEyePBkATOtIEQeyaROQna0kJbfcIjqa5nl6AnfcoVxv4xCdwWAwJU22NjzeGF8hfOXKlSgtLRUcDbE6PjQXFCR+mYGW8B6wkhLUVlRgyZIluO+++6DT6Ux3WbNmTYu7XTz44IPYt28fNmzYYI1oTShpsiM8Cbn8jWVr7rvvPgDKDJ4aO5wdQTqAJyF33KEspmer7rlH+fn996bhAXP8+eefyMnJgZ+fH6ZNm2ah4DouOTkZffv2RVVVFdUXOprGBeC8N8cWubkB3t4AgNXffouCggLMmDHD7If369cP0dHRpkaMtVDSZCeys7OxadMmaDQazJ49W3Q4rbr++usREhKCgoIC/PLLL6LDIdZSUqIkIUBDUmKrEhOB/v2VgvDly81+GF/McubMmXBzc7NUdB2m0WgwZ84cALD6lwoRrLBQ+dmpk1I3ZCVVVVWIiopCVFQUqqqqTLcXFRUhJCQEw4YNg8FgaPqgwEAAwE8//4zu3bujZ8+epv+aM2eOaVkPjUZjujR27bXXYtWqVVad8EBJk53g+1uNHj0aERERgqNpnZOTkymx+/LLLwVHQ6xmxQqgqkrZZy4xUXQ0rdNogPqkwtwhuqKiIvz4448AlFlztu6uu+4y1RfSCuEOwmhsKAC3ci+Tu7s7Pv/8c5w8eRLPPfec6fZHHnkEJSUlWLZs2ZUjJH5+gFaLHenpSIiJafJfDzzwAK699loAyvcIvzQ2aNAgFBcX4/Dhwxb5nZpDSZMdYIyZ3ix333234GjMM2vWLADAb7/9hkLe8iFy48nHPffYbh1FY3fdpeyHtWuXsifdVSxfvhx6vR4xMTGIj4+3QoAd07VrV9OXzhdffCE4GtIqxpRhtY5e8vKAsjJlk16drv3HaWfPTXJyMubPn48lS5Zg69atWLlyJb799lu8/vrr6NOnDwClEDwrK0t5gE6HOm9vZJ49ix6XLSg9dOhQ02NmzZplujQWGRkJADhy5Ei74m0PSprsQGpqKo4fPw53d3fcYqvFtZcZMGAA4uLiUFtbi++++050OMTSMjOBnTuVVYcvO7HZrOBgYNIk5boZ05z5YpF33323zawAfjV8iO6LL76AsR3LKxArqaxUFnzs6CUsDBg5Ulnuw9u7/cfpwOLEL730EgYMGIDZs2fj4YcfxqhRozBv3rwW71+k1YIxhk7Ozm1eAqRTp04AgIu88N0KKGmyA7yVeNNNN8G7vnDOHvBWAR9aJBL79lvl57hxQEiI2Fjagm9D9M03rbaus7OzsW3bNmg0GtN0fnswbdo0eHt7Izs7Gzt37hQdDnEALi4u+Oyzz3D69GmUlZVh6dKlrTcyvLwAAMxovOqkjMLCwib1UryWyZqNGEqabFxNTQ2+rf9CspehOe6OO+6ARqPB9u3bcfr0adHhEEv65hvlJ5/Kby9uuAHw8FDWbEpNbfFu/DM4atQom1tUtjXu7u646aabADT8DsQGeXgA5eUdu5w+DWzZAqSkKEN0HTmWh0eHfp21a9cCUNYWPHHiRKv39Q8IgEajwaXS0oYi9mbU1dUhKysLhw8fRnV1NQDg0qVLAIDA+oJya6CkycZdunQJ48aNQ48ePTBu3DjR4bRJaGgoxo4dCwC0D5bMDh4EDh8GXFyA+i9ou+HpqSROQEPi14xv6v/PVjbIbgu+WOCKFStQV1cnOBrSLI1GeS925KLXK8t8hIcrvTcdOVYHem4OHDiAhQsX4p577kF8fDzuu+8+lLTSg+Tk5ISekZE4nZur9DQ1eo827kEqLi4GYwzu7u6mmau8Md6vX792x9tWlDTZuC5dumD58uXIyMiAkxWnj6qFD9FRK1di/G97/fXKbBh7w3vHli9Xtp+4zOHDh7F//344OzvbTU1hY+PHj4e/vz/y8/OxefNm0eEQS6itVXqXAKEbZNfW1mLOnDkIDQ3FkiVLsGzZMly4cAFPPPFEq48bOmwY9hw7pgyRN0qw+L6OxcXFpl4lXscEAHv37oWvry8GDBhggd+meZQ02Ql7TJgApabC2dkZhw4dom1VZMRYQ9Jkb0Nz3HXXKcleXp4yvHEZ3ss0ceJE+Av8QmovZ2dnTJ8+HQA1XqRVn1DAwwNwdRUWxqJFi5Ceno7PPvsM3t7eiImJwf/93/9h6dKlre4TN23aNOS
cP4+M7OyGJROgLCkAAI8++iiWL1+OP/74o0nStG7dOkydOtW6EzMYUU1JSQkDwEpKSkSHYlMmTZrEALBXXnlFdChEbSkpjAGMeXoyVlEhOpr2mztX+T3uv7/JzUajkfXp04cBYF9//bWg4Dpuw4YNDADr1KkT0+v1osNxWFVVVezIkSOsqqpK3QMfO8ZYaipjeXnqHrcN9u7dy5ycnNhjjz3W5Pa6ujqWmJjIQkND2aVLl5p9rF6vZ4GBgeyVBx9kbM8exmprTY997LHHWGBgINNoNKxxynL06FEGgK1fv/6qsZnzupv7/U09TcTibr31VgCgpQdkxLfomDq1w8WjQvHtG376qckQ3eHDh5GRkQEXFxdMmTJFTGwqGDlyJIKDg3Hp0iWsW7dOdDhETY2H5hr1wlhbQkICamtr8c477zS5XafTYffu3Th37hz8Whi+d3Fxwbx58/DZqlUw1NWZhuh0Oh3eeecdbN++3XQM7oMPPkB8fLypbtZaKGkiFjdt2jQ4OTnhwIEDyMjIEB0OUQtjwA8/KNftsNaniTFjlCG6/Hxg+3bTzd/XbwszYcIE+Pj4CAqu43Q6HW6++WYAMK1qTiRhI0NzHfXEE0+gvLoa3/7xR5Mhurq6OpTVJ4V8eLywsBCffPIJFi1aZPU10yhpIhbn7+9vmvlHm4dK5OBBZVFLNzdg4kTR0XSMs3PDLDqeCKIhabLHAvDL8aTp559/pll0MuFJkx3W2zXm5eWF/JwczJw0CSgtNc2ia27WXEBAAMrLy3H99ddbPU5KmohV0BCdhPjmvBMnmhaos2s8MfrhB4AxnDhxAgcPHoSTkxNu4AmVHRs5ciT8/f1x8eJFbG/Um0bsmI0MzanG3V1phDWaRVdcXAyg6aw5kShpIlZx4403QqfTIT09nRa6lAXvkanvwbB7116rrFGTkwPs2WPqZRozZoxdzpq7nLOzsyn5+6FRbxqxY3x6vp0PzTXBk6PiYhgMBtMaTy3VQ1kbJU3EKgICAnDNNdcAAH755RfB0ZAOy8gADh0CnJwAOy6QbsLdXVlrCgC+/96UWPDp+jJoXNfE2rkpK7Eh9b0wdrk+Wkv471JSgtKSEjDG4OrqCnd3d6FhcZQ0EauZNm0aAKWmgtg5Xkw8dqwcwwJc/RBd3YoVSE1NhUajMb1vZXDttdfC09MTOTk52Lt3r+hwSEcYDErtDyBX0uThodQYGo3QFxQAUHqZbGWTbEqaiNXwL58tW7agqNHsCGKHeOJrb9umXM2kSYCzM5xOn0ZfAEOGDEGXLl1ER6UaNzc3U/EsDdGJo0ovX1kZYDQq2xfZSC+MKjQaUxLoVF4OoONDc2r2qlLSRKwmMjIS0dHRMBgMra4OS2xcfj6wa5dyXZahOc7HBxg9GgAwFcDUqVOFhmMJvPGyevVqwZE4HmdnZ2g0GlRUVHT8YI2H5mykF0Y19UmSD2NwcnKCVwcnmlRWVgJQXv+Oss+9OYjdmjZtGg4dOoSff/7ZtC8dsTNr1iizW+LjgbAw0dGoTj9hAlzXrcNUAAESzJq73KRJk6DVanHw4EFkZ2ejW7duokNyGDqdDr6+vigoKIBer4ePjw+cnJzaPvTEWNP1maqr1Q9WJGdnGDUaaBmDv5sb9Hp9uw7DGENlZSXy8/Ph5+cHnU7X4dAoaSJWNW3aNLz66qv4/fffodfr4SrLjA9HsmqV8lPCXhgA2OzjgwkAhgPQSjQ0x/n7+2P48OHYunUrVq9ejUceeUR0SA4lODgY7u7uyM/PRymvSWqr6mrgwgVAq1WSpvraH5lUXrwID8ZQW1mJig4Or/n5+SE4OFiVuChpIlY1aNAghIaGIjc3Fxs3bsREe18U0dHo9cAffyjXJU2avt21CyEABgLA778DEvaITp06FVu3bsWqVasoabIyjUYDPz8/+Pr6wmAwtG+h0X/+E/j4Y2V4/K231A9SsMzMTCx94AG8BcDQpw90HZhx7ezsrEoPE0dJE7EqrVaLyZMn4+OPP8Zvv/1GSZO92bQJKC8HQkKAhATR0ajOYDBg9erV6IX6pGnVKimTpilTpmD+/PnYuHEjysvLO1wzQtpOo9HAyckJTk7t+BpevhzIzgaGDVMWg5TMb7/9hm+ys/EOAG12ttKTFh4uOiwAVAhOBOCzd3799VdaK8be8KG5KVOUoQHJpKamoqCgABv45sO//w7U1IgNygKioqLQs2dP1NTU0Aa+9ubMGWULI60WuO460dFYxK+//ooiABe6d1dusKGJQ/Kd9YjNGzduHJydnZGZmYkTJ06IDoeYizGAz7iSdGjut99+AwAETJoEBAUp6+BIuOWIRqPBlPqZj6t4IkzsA08ghgwBAgLExmIBxcXF2Lp1KwDAle828OuvAiNqipImYnXe3t4YOXIkANDSA/bk+HFlSMDFRVnUUkI8abru+usbNiH+/XeBEVkOX05hzZo1MBqNgqMhZuPnzMmTxcZhIX/88QcMBgOioqLgf9ddyo1//mkzMwQpaSJCTK7/wFPSZEfqEwqMGqXs0SaZgoIC7NmzBwCUWrtJk5T/4L+3ZK655hp4enriwoULOHDggOhwiDmqq5UEApA2afq1vldp8uTJQGwsEBoKVFYCmzcLjkxBSRMRgtc1bd68GeX1q74SG8eTB0mL99euXQvGGGJjYxEaGqps4KvRKPUj586JDk91rq6uGFvfY/i7pL1p0tm0SUkgunYFYmJER6M6o9Foei9OnjxZ+fzx/SBtZIiOkiYiRJ8+fRAZGYmamhps2LBBdDjkaioqGlp6vAdGMnxojif0CAwEkpKU65ImFXz2KiVNdoI3XK6/Xr5VwAEcPHgQ+fn58PT0xPDhw5UbeY+ajfT4UtJEhNBoNJhU/+VLJ2w7sGmTMossIgKIihIdjeoMBgPWrl0LAKb3Zf0/lJ82csJWG0+atm/f3v6FFon11L9HZe3t/aN+DbjRo0fDxcVFuXHsWMDJCTh5Ejh1SmB0CkqaiDATJkwAAJrybA94YjtpkpQt3NTUVBQWFsLX1xdDhw5t+A+eNK1bB9TWignOgiIjI9G7d2/U1dVRj6+ty85WJmPodNJOxODfBddee23DjT4+AP9M8oV1BZIiaSovL8eLL76IiRMnwt/fHxqNBsuWLTP78cXFxbj//vsRFBQET09PjBkzBvv27bNcwAQAMGrUKOh0Opw8eRJZWVmiwyGtcYB6JkA5WTdZbHDQIGVad2lpwybFkuG9Tb9J2psmDZ4wJCebNrSVSVVVFbZs2QKgoUFtwtej4j1tAkmRNF28eBELFy7E0aNHERsb26bHGo1GTJ48Gf/73//w6KOP4s0330R+fj5Gjx5NawhZmK+vL5KTkwEA69evFxwNaVFmpnJxcgLGjRMdjUXwYYErTtY6nU2dsC2hcV0TLTZrw/j7T9IFLbdt2wa9Xo+uXbsi6vISAP65/PNP4T2+UiRNISEhyMvLQ3Z2NhYvXtymx65cuRI7duzAsmXL8OKLL+KRRx7Bpk2boNPp8OKLL1ooYsLxblgaorNhfIrzkCGAt7fYWCygpKQEKS
kpAC4bFuDGj1d+8tdBMqNHj4arqyvOnDmDjIwM0eGQ5tTVNbz/JE2a+HfAhAkToLm8BCAhQenxLSsT3uMrRdLk6ura7h2MV65ciS5duuBmvvIogKCgIMyYMQM///wz9Hq9WmGSZoyv/0L6888/aYE9W8VP1jx5kMymTZtgMBjQu3dvdOfbNjTGe9dSU4GSEqvGZg0eHh4YNmwYAOVzSGxQaipQXAx06gQMHiw6Govgvb3NNlx0OmUJEOWOVozqSlIkTR2RlpaGhIQEaC/bRyspKQmVlZWttrz0ej1KS0ubXFRXXg68/TYwe7ayjYVkkpOT4e3tjcLCQqSnp4sOh1zOaGxImiQfmmv2ZA0oMwZ79wYMBptZYE9t4+r/tpQ02Sg+NDd+vJJASObChQvYv38/gIb34hX4EJ3gYXKHT5ry8vIQEhJyxe38ttzc3BYf+/rrr8PX19d0CbfELsw6HfD008AXXwAS1lg5Oztj9OjRAGiIziYdOAAUFgJeXkoBqoQaDwu0iJ/IJU0q+BfVxo0bYTAYBEdDrsDPja29R+0Yr2mNj49H586dm78T/9337AEuXrRSZFdy+KSpqqoKrq6uV9zu5uZm+v+WLFiwACUlJaZLTk6O+gG6uwP1XeeQdEowH6KjYnAbxJOEkSMBZ2exsVhAVlYWTpw4AZ1OZ0rem8WHJiV9jw4ePBg+Pj64dOkS9fjamrIyYPdu5XpLvaF2rtmlBi7XtSvw0EPKyEvjGa5W5vBJk7u7e7N1S9X1mwO6u7u3+FhXV1f4+Pg0uVgEX5ND0qSJf1C2bt3aapJKBOBJgqRDc/xknZycDF9f35bvOGaMsj7VkSNAXp6VorMeJycnjBo1CgAN0dmcbduUQvDISKBbN9HRqI4x1vLs1cu99x4wb57QJRccPmniM+8ux28LDQ21dkhX4knTxo1KjYlkoqKiEBoaCr1ej23btokOh3A1NUD9uimyFoHz3s2rnqz9/ZUZPIC0jReqa7JR/P0m6YKWR44cQV5eHtzc3Bq2TrFhDp80xcXFYd++fVfM3EpJSYGHhwf69OkjKLJGEhOVXeUvXgQOHRIdjeo0Go2pt4mG6GzIrl3K5qBBQUB0tOhoVMcYw8aNGwG0UnzaGL+PpO9R/hps3bqVZg3bEsmTJt7bO3LkSFNZjC1zqKQpLy8Px44dQ22jxbGmT5+OCxcu4IcffjDddvHiRXz33XeYOnVqs/VOVufiAowYoVyXtJVL6zXZoMaz5rTynSoOHz6MgoICeHh4IIlvzNuaxsXgEs5kHTBgALp06YKqqirsknT1c7tTVASkpSnXW6u5s2NmD83ZCHHVVCp79913UVxcbJrttmrVKpw9exYA8Nhjj8HX1xcLFizA559/jtOnT5vWY5k+fTqGDBmCe+65B0eOHEFgYCDee+89GAwGvPzyy6J+nSuNHavs/7VhA/D446KjUR1v5aalpaGgoABBQUGCIyKyLzXAe5mGDx/esDloa665RmnA5OQoM1ltoRdaRRqNBmPHjsU333yDP//801TjRATavFlJ0Pv1A5qZ5W3v9Ho9Ntcv49FqEbgNkab5+NZbb+GFF17A+++/DwD44Ycf8MILL+CFF17ApUuXWnycTqfDmjVrcNttt+Gdd97B3//+dwQGBmLDhg3o27evtcK/Ot41u3mzUhQomeDgYAwcOBAAaONQW1BWBtSvki170jRmzBjzHuDh0TCTVdK6H6prsjGSD83t2LEDlZWV6NKli+n8b+ukSZqysrLAGGv2wnuVli1b1uTfXKdOnfDJJ5/g4sWLqKiowKZNmzDY1lZdjYtTZgyUlgKSbiZMQ3Q2ZMuWhhk7PXqIjkZ1RqPR1MI1O2kCpF96gCdNu3fvRllZmeBoCOoTe1mTJt5wGT9+/JVbp9goaZIm6el0DWPakvbE8PWa1q1bRxuHiib50NyBAwdQVFQET09PDBo0yPwH8tdj40ZlhXDJdO/eHZGRkairqzPtOE8EuXABOHxYWepC0qHSTZs2AUDra6TZGEqa7Ink6zWNHDkSzs7OOHPmDE6fPi06HMcm+fpMvIU7YsQIOLdl0c7BgwEfH+DSpYYCXcnQEJ2N4L1McXHKZrWSqaqqMm2UTUkTsQyeNG3bBkg4JdjT0xOJiYkAYBo6IQLk5wMHDyrXJR0W4C3cNg3NAcpKxPwEL2lSQUmTjZC8nmnXrl2oqalB165d0bNnT9HhmI2SJnvSvz/QuTNQVdVQpCsZPmOHkiaB+Mk6NlZZo0kyBoOhffVMnOR1TWPrv6QPHDiA/Px8wdE4MMmTJt5wGTVqlN3UMwGUNNkXjUb6ITpKmmyA5PVM6enpKCkpgY+PD+Lj49t+AP66SNrjGxQUhJiYGAANw5jEyrKzgcxMpZaVr9EnGXusZwIoabI/kidNw4YNg06nQ1ZWFrKzs0WH45h4AXB7emHsAE8ERo4cCaf2bPzZr5/SA1ddrey4LiEaohOMJ6tJSYC3t9hYLKC6utou65kASprsD0+adu0CKirExmIB3t7eptlM1NskwPnzQEaG0qt5zTWio7GINq/PdDmNBhg5Urku6Qwz/trQDDpBJB+a27VrF/R6PUJCQtCrVy/R4bQJJU32JjISiIgAamuB7dtFR2MRNEQnEP+SjI0VupO4pdTV1WHr1q0AOpA0AQ1Jk6Tv0WuuuQYajQbHjx/HhQsXRIfjWBhr6GmStLeXn9tHjx5tV/VMACVN9qdxXZOkXeeUNAnEkyaeFEhm7969KCsrQ6dOnRAbG9v+A/F1c7Zvl3KF/k6dOplWaOZJJrGSkyeBs2eVLXv4CvSSsdd6JoCSJvvE32iSdp1fc8010Gq1yMzMxLlz50SH41gkT5oa1zNpO7IJcXS00hNXXg6kp6sSm60ZWf8eoCE6K6tPKDB0KODuLjQUS6iursbOnTsBwC73N6SkyR7xL7Q9e4DKSrGxWICvry/i4uIAUG+TVRUVNazPJGnS1O71mS6n0zXUfEmaVFDSJAjv2ZP0M7h7927o9XoEBwejjx1uek1Jkz3q3h3o2lUZFti1S3Q0FsG7bSlpsiJ+suazwyRTW1uLbdu2AVAhaQIahugkfY+OqJ/qfuDAgVY3PScq40mqAyw1YG/1TAAlTfZJo2n4QElab0B1TQJIPjS3Z88eVFRUICAgANHR0R0/IH+dtm4FjMaOH8/G8J4Axhi2SzrpxObk5ChrNOl0yvCchBovammPKGmyV5InTSNGjDDN3jl//rzocByD5EkTL2jucD0TFx8PeHoq+9AdPtzx49kgGqKzMn4+T0gAvLzExmIBer3eVM9kj0XgACVN9ot/se3cqSw/IJlOnTqZViWmE7YVlJUB+/Yp1yVPmq5Ra/0pZ+eG2U2SvkcpabIynjRJOjS3e/duVFdXo0uXLujbt6/ocNqFkiZ71b8/0KmTUggu6W7rNERnRdu3K0NMkZFAWJjoaFRnNBpNQ0wj1PxCkryuiSdNe/fuRXl5ueBoHICD1DPZ235zjVHSZK+0Wuln7/CkiX/QiAVJPjR35MgRXLp0CR4eHqaZmapovDI4Y
+od10Z069YNERERqKurwy5JJ53YjMJC4MgR5bqkq/E3XtTSXlHSZM8kr2virdwjR46goKBAcDSSkzxp4rPmhg4dCmdnZ/UOnJgIuLoCFy4AJ06od1wbQkN0VlL/HkW/fkBgoNhYLKC2thY7duwAYL9F4AAlTfaNf8Ft2ybl7J3AwEAMGDAAAJ2wLaqqCti9W7kuadLE65lUHZoDADc3IDlZuS7pe5SSJiuRfH2mtLQ0VFVVISAgAP369RMdTrtR0mTPEhIADw9lUcKjR0VHYxG8RUInbAtKSVEmE3TtqtQ0SYj3NKlWBN6Yg9Q17dq1C9XV1YKjkZjkReD8Mzhs2DC7rWcCKGmyb87OwJAhynVJkwreM0DrxFgQ/7IfOVJZA0wyZ86cwZkzZ6DT6TCEf17U1LiuSUJ9+vRB586dodfrkZqaKjocOZWXA3v3KtclTZr4OdwiDRcroqTJ3kle1zR8+HAAQHp6Os3esRTJ65n40FxCQgI8PT3Vf4KhQwEnJ+DMGWVhQsloNBoaorO0XbsAgwGIiFAukmGMmXqa+DndXlHSZO8ar0os4eyd8PBwREREwGAwICUlRXQ48qmpUdb6AqRNmvjJWvV6Js7TExg0SLku+RAdJU0WIvnQ3MmTJ5Gfnw9XV1cMHjxYdDgdQkmTvRsyRGnlnj0rZSsXaGiZ0BCdBezbpxSCBwQos3YkpPqils1pPClDQjxp2r59O+rq6gRHIyHJi8D5uTsxMRGurq6Co+kYSprsnYdHQytX0lYgT5q2SfqFJBRPRIcPl7KeqaioCIfrtzixaNLEhxwkTeyjo6Ph5+eHiooKpKeniw5HLjU1DRuvS9rTJMvQHEBJkxwkr2viX3a7du2CwWAQHI1kGidNEuIt3L59+yIoKMhyT8S3UzlyRNmLTjI6nQ5D6zeQ5WvtEJXs3av09gYGAlFRoqOxCIvOXrUySppkIHnSFB0dDR8fH5SVleHgwYOiw5EHYwD/ApQ0abJ4PRMXFAT07q1c5zVikhlWnxjSMLnK+Hn7mmuk7O0tKCjA8ePHATS8h+wZJU0y4Nn78eNAfr7YWCyg8VRxOmGr6NQpZSVrF5eGIV7JWGxRy+ZIPkTHh1aop0llkheB8/dL//794e/vLziajqOkSQb+/kD9ytmQ9IRGdU0WwL/cBw1SVraWTFVVFfbs2QPASsMCkidNSUlJ0Ol0OHv2LM6cOSM6HDkYjQ3nbEmTJpmG5gBKmuTBT9iSJk38A0c9TSqSfGhu9+7dqK2tRWhoKHr06GH5J+RDD7t3KyusS8bT09O02TH1NqkkI0PZ0cHdHVBzI2kbIsuilhwlTbLgJ2xJT2bJycnQ6XTIycmhVq5aJC8Cb9zCtcq2DVFRQKdOSlGvpDPMaPkPlfHzdVKSssODZBr39sowcw6gpEkePGnaswfQ68XGYgGNW7l0wlZBcTFQPxUf9bOiZGPVeiYA0GobPoeSvkd5IS/1NKmEv44SFEg3JzU1FbW1tQgJCbFOb68VUNIki169lCmrej2QliY6GougIToV7dypzJ7r1Qvo0kV0NKozGo3YVb/2jVVbuJL3+PLXcv/+/bStkRokT5r4uXr48OF2vUlvY5Q0yUKjcZgTNiVNKpC8nuno0aMoKSmBp6cnBg4caL0nblwMLuG2RmFhYQgPD4fBYMDu3btFh2PfioqAo0eV65bYSNoGyFYEDlDSJBcHSZoOHDiA0tJSwdHYOZ54StrC5cNHSUlJcHJyst4TJyYq2xrl5tK2RqR1fBXwvn2VUQLJGI1G0+eQkiZimxrXU0jYyuWzoBoPvZB2qK0F+ObHkvY07axfYNLqi+l5eAAJCcp1SZMKqmtSieRDc0eOHEFxcTE8PT0RGxsrOhzVUNIkk8GDlVbu+fPUyiUtO3AAqKwE/Pyk3aSXf6EPFVHk7iA9vjt37oTRaBQcjR2TPGniQ3NDhgyxbm+vhVHSJBN394ZWruQnbEqaOqDx0JxWvlNAYWGhaduGISJqRSRf5DImJgaenp4oKSnBkSNHRIdjn+rqGnp7JU2aeMNFhq1TGpPmjKnX6/H0008jNDQU7u7uSE5Oxrp16676uJdeegkajeaKi5u9rpAseSu38ea9dXV1gqOxU5LXM/Gh2759+yIgIMD6AfDX9eBBQMLaOycnJyQnJwOgxku77d/f0Nsr6Sa9fIhcSG+vBUmTNM2ZMwf/+te/MHPmTCxZsgQ6nQ7XX3+92dtuvP/++/jyyy9Nl6VLl1o4YguRPGnq378/fH19UVFRQZv3tpfkM+eEt3BDQ4Hu3ZUtMiStvaN96DqIv25Dh0rZ21tQUICTJ08CENTba0FSDDTu3r0b3377LRYvXoy//e1vAIC7774b0dHRmD9/vlkf7OnTpyNQhhkMPKvfvx8oLwe8vMTGozKtVovk5GT88ccf2LVrF+Lj40WHZF/OnAHOnlVq35KSREdjETbRwh0+HMjKUr4cJ0wQF4eF8ISUepraSfJ6Jt7bGxUVhU6dOgmORl1SpLgrV66ETqfD/fffb7rNzc0Nc+fOxc6dO5GTk3PVYzDGUFpaCmbvs87CwoCICKWVK+k6Krzlwr8cSRvwL7n4eGWml2Tq6uqQUl8rIrSWQvK6piFDhkCj0SAzMxMXLlwQHY79kTxpsomGi4VIkTSlpaWhT58+8PHxaXJ7Un1LOt2MfaAiIyPh6+sLb29vzJo1y6wTgV6vR2lpaZOLTZB8iI5/EGnZgXaQvJ7p4MGDqKyshK+vL/qJnBnIX99duwCDQVwcFuLn54cBAwYAoCG6Njt7Vunx1Wqpt9cOSZE05eXlISQk5Irb+W25ubktPrZTp0549NFH8eGHH2LlypW47777sHz5cowYMeKqSdDrr78OX19f0yU8PLxjv4haJE+aeDJ84sQJFBYWCo7GzvBEU9KkiX+BDxkyBFqRtSLR0YC3tzJEzvf4kwzNZG0n3kMeGytd+QSg9Pby1eIpabJRVVVVcHV1veJ2PgOuqqqqxcf+9a9/xX/+8x/ceeeduOWWW/D222/j888/x4kTJ/Dee++1+rwLFixASUmJ6WLOMKBV8C/EnTuVYTrJ+Pv7o2/fvgCot6lNqqqUWjdA2m0bbKaFq9M19CJI+h7lw5/0GWwjyYfmeG+vj48P+vfvLzoc1UmRNLm7u0Ov119xe3V1ten/2+LOO+9EcHAw1q9f3+r9XF1d4ePj0+RiE2JilHqV4mLg2DHR0VgEDdG1w759yvowwcGArfSKqkz4zLnGeGIq6XuUfwb37t2LmpoawdHYEcmTJt5wSU5OFtvbayFS/EYhISHIy8u74nZ+W2hoaJuPGR4ejqKiog7HJoSzM1C/joqsQ3S8GJySpjbgi+klJysbPEvm/PnzOH36NDQajWkdIaEkT5p69eoFf39/VFdX48CBA6LDsQ9VVUrjBZA+aRLe22shUiRNcXFxyMjIuKIGic+iiYuLa9PxGGPIyspCUFCQWiFaX+N96CTEk6aUlBQYJCy0tQj+5S350Fx0dLRt9PryxO3oUaXXVzKNk1NqvJhpzx6ltzckBOjWTXQ0
FkFJkx2YPn06DAYDPvroI9Nter0eS5cuRXJysqlA+8yZMzh22XBVQUHBFcd7//33UVBQgIkTJ1o2cEvib1jeuyCZ6OhoeHp6oqysDEePHhUdjn1o3NMkIZsamgOAoCCgZ0/luuTLf1DSZCZeBD50qJS9vfn5+cjMzAQA2+jttQApFrdMTk7GrbfeigULFiA/Px+9evXC559/jqysLHz66aem+919993YvHlzk7WYunXrhttuuw0DBw6Em5sbtm3bhm+//RZxcXF44IEHRPw66ri8levnJzIa1el0OiQlJWHjxo3YtWsXoqOjRYdk2/LylGnOGo2ysbOEbLKFO2QIkJmp9PJJuMglJU1tJHnDhb8P+vXrJ92ilpwUPU0A8MUXX+Dxxx/Hl19+iXnz5qG2tharV6/GyJEjW33czJkzsXv3brz00kt4/PHHkZqaivnz52PLli3wsOfF/wIDqZVLGvCTNZ8KL5mamhrs2bMHgA31NAHS1zXx5T8yMzOb7bUnl+GfQ8mHyG2q4aIyKXqaAGV5gcWLF2Px4sUt3mfTpk1X3Pbxxx9bMCrBJG/l8g8mrQxuBslbuGlpadDr9QgMDESvXr1Eh9OgcdLEmHRDMn5+fujXrx+OHj2KlJQUTJkyRXRItuvsWeDcOWU5ikGDREdjEY6QNEnT00SawU/YktY18THzI0eOoFjCQltVSZ408d5Gvr2HzYiJAdzcgEuXgBMnREdjEdTja6bGvb2enmJjsYC6ujqkpqYCoKSJ2Cv+BclbuZLp3LkzIiMjAcD0YSXNMBgA/vpIOizAZ8raXPGpi0tDr4KkSQUlTWaSvOFy4MAB29jCyMIoaZJZbCzg6goUFQEnT4qOxiJo814zHDmibOfh5QVIejLjX9g2lzQB0tc18c/g7t27afmP1kieNMm+qCUn729GmrZyJR2io5XBzcD/9omJSj2FZAoKCnD69GkAQGJiouBomiF50jRgwABa/uNq6uqUNZoAaXt7HaGeCaCkSX6Nh+gk1HhogEk4BKkKyRe15ENzUVFR8LPFpTX4637gAFBRITYWC+DLfwDUeGnR4cNAZSXg4wNERYmOxiIoaSJykLyVGxMTAzc3N1y6dAkZGRmiw7FNkg8L2Gw9ExcWBnTtqtSW7d0rOhqLaLxCP2kGP/8mJgISDl0VFBTg1KlTAGz4c6gS+f56pCmeNO3fr+x7JBkXFxcMrl+skVq5zSgrU1q5ACVNIkneeKFi8KtwkIaLzfb2qoiSJtmFhyu72tfVNWwUKRk6YbciNVWZOdmtm/I+kIzRaMTu+sVbh9jy8KPkSRNPWA8fPnzFHqAE0i9qyZMmm/4MqoSSJtlpNNKfsGkGXSskb+FmZGSgpKQE7u7uGDhwoOhwWsY/gzt3Srn8R5cuXdCjRw8wxmj5j8uVlirbWQHSfg7tordXJZQ0OQLJF7nkhYcHDx5EeXm54GhsjORJEz9ZDxo0CE5ONrzBQUIC4OQEnD8P5OSIjsYiqMe3Bby3t3t3oHNn0dGornFvLyVNRA6S9zSFhoYiPDwcRqPRtP8YgXKidpCZczZ/svbwUNZNA6T9HFLS1AL+etj6e7Sdjh8/bh+9vSqhpMkRDBqkzNjIyVH2PpIQDdE148wZ4MIFpYcjPl50NBZh04taXk7yxgst/9EC6u2VCiVNjsDLC+AtAMmH6KiV2wj/W8fGAu7uYmOxgMrKShw4cAAAJU22IC4uDq6urrh48aJp+rnDY4yKwCVDSZOjkLyuiVq5zZB8aG7fvn0wGAwIDg5GeHi46HCujv8d9u0D9HqxsViAi4sL4ut7NKnxUi87G8jPB5ydpe3ttZshcpVQ0uQoJF8ZPD4+Hs7OzsjPz0dWVpbocGyDgwwLJCcnQ6PRCI7GDD17AgEBSsK0f7/oaCyC6pouw1+H2FjAzU1sLBZgd729KqCkyVHwVu6ePcqaTZJxc3NDQkICAKprAgDU1DSsyyXpyczuWrgOtPwHJU31JG+48N7ekJAQhIWFiQ7HKihpchR9+wK+vsr+R4cOiY7GIuiE3ciBA0B1NdCpE9C7t+hoLMIuaykcJGlKT09HlYQ7ELSZ5PVMjSdi2EVvrwooaXIUWi1Qv6mm7Cds6mlC0xauhCez8+fP48yZM9BoNKZtdOyC5ElTREQEgoODUVdXh32S7kBgNgfq7bWrhksHUdLkSCQ/YVMrtxHJhwX4yXrAgAHw9vYWHE0bJCUpSezp08pyEJLRaDTU48sdOKDUr/n7A716iY7GIuxuiFwFlDQ5Esln0HXr1g2dO3dGXV0d0tLSRIcjluQz5+xqfabGfHyAAQOU65J+Dilpqsd/f54oSyYvLw85OTnQarX21dvbQZQ0ORI+PHfsGHDpkthYLKBxKzdF0i8ksxQVASdOKNf531wydt3CdZAeX4dPmhyot9fLy0twNNZDSZMjCQxs6Cau3ytINvxL1KGTJv637d1bGRqQjMFgMG0Ka9dJk6S1d4MHD4ZWq8XZs2dx9uxZ0eGI40BF4I6EkiZHI/kQHSVNkH5o7ujRoygvL4enpycG8KEue8K/ZPbsAQwGsbFYgKenJ2JiYgA48OfQgXp7HakIHKCkyfFIvshlYmIiNBoNsrKycEHCQluzOMiwwODBg6HT6QRH0w79+ilbG5WXA0eOiI7GIhx+iM4Benv55ujU00Tk1rieQsLtRnx8fNC/f38ADtrKbbzXlaQnM7uuZwIAnQ5ITFSuS/oedfgeX54s2ut79CqOHDmC8vJyeHl5oV+/fqLDsSpKmhxNTIyynP+lSw3dx5Jx6BP2iRPK39bNTflbS0iKYQHJe3z5Z3Dv3r2ok3AHgquSvJ6JfwYTExPts7e3AyhpcjQuLsCgQcp1SZMKfsJ2yKEB/jdNSFD+1pIpLy/HofoV7e22pwmQvrawb9++8PX1RWVlpenv5TAYaxies+f3aCsctQgcoKTJMUneyuU9EKmpqTBIWGjbKsmH5vbs2QOj0YiwsDCEhoaKDqf9+N/n8GGgrExsLBag1WqRWD8E6XA9vidPKoXgrq7U2yshSpocET9hS3oyGzBgADw9PVFWVoZjx46JDse6JJ85Z/f1TFxwMBARofRK1BfUysZhe3z57ytpb29ZWRkOHz4MQILPYTtQ0uSI+Bt9/35Awu1GdDqdaYVahzphV1Upf1NA2p4maZImQPrGi8MuNOsAvb2MMdM+g46GkiZHFBEBdOkC1NUBkm434pAn7LQ05W/apYvyN5YQJU32g/+Njh07hpKSEsHRWJGDFIFL8RlsB0qaHJFG4zAnbIdKmhoPzUm419XZs2eRm5sLnU6HQXwygz1rXFso4fIfQUFB6NGjBxhjphXcpVddTb29kqOkyVE5SNJ06NAhlJeXC47GSiQfFuAn6+joaHh6egqORgUJCcqaTefPAzk5oqOxCIdrvKSlAbW1QOfOQLduoqNRHWPMVPLgiEXgACVNjkvypCk0NBTh4eEwGo2mlWul5yBJkzQtXA8PIDZWuS7p59DhkqbGi1pK2Nubk5OD8+fPw8nJCQkJCaLDEYKSJkeVmKh8qLO
ygPx80dFYhEOdsM+fB7Kzlb9pfRG8bKSc5ix546XxDDom4RDkFRyknikmJgbu7u6CoxGDkiZH5eOj7IEFSH/Cdoikif+OAwYof1vJ1NXVybnXleRJU3x8PJydnVFQUICsrCzR4Vge9fZKj5ImRyb5CduhWrmSn6wPHTqEyspK+Pj4ICoqSnQ46uF/r717lVoYybi5uSEuLg6AAzReLlxQeu41moa9BSVDSZNESZNer8fTTz+N0NBQuLu7Izk5GevWrTPrsefOncOMGTPg5+cHHx8fTJs2DadOnbJwxDZA8qRp0KBB0Ol0yMvLw9mzZ0WHY1mSbxDaeK8rrVaa0xbQpw/g66ussSXpdiMO0+PLf79+/aTs7a2trcXevXsBUNIkhTlz5uBf//oXZs6ciSVLlkCn0+H666/Htm3bWn1ceXk5xowZg82bN+PZZ5/Fyy+/jLS0NIwaNQqFhYVWil4Q/sbfvRswGsXGYgEeHh6Iqd/GQOoTtsEA8CndktdSSHey1mqBpCTluqTvUYdLmmR7j9Y7ePAgqqqq4Ovriz59+ogOR5h2JU3mDHWUlpa259Dtsnv3bnz77bd4/fXXsXjxYtx///3YsGEDunXrhvnz57f62Pfeew8nTpzA6tWrMX/+fDzxxBP4448/kJeXh3/+859W+g0EiY5WZvCUlgLHj4uOxiIcYiuHo0eB8nLAywvo3190NBYhbdIENCS6kr5H+d9s3759qKmpERyNBTlIEXhycrJcvb1t1K7ffMSIEcjMzGzx/3/77TcMGDCg3UG11cqVK6HT6XD//febbnNzc8PcuXOxc+dO5LSyBsrKlSuRmJho2lwSAKKiojBu3DisWLHConEL5+QE8EUCJW0FOsTK4PzLdvBgZd0fyZSWluLo0aMAJE2aJB8m79WrF/z9/aHX67GfL/woG6OxobdXxvcoJG+4tEG7kqbMzEzExsbi3XffbXJ7WVkZ7rvvPkyePNmqO5CnpaWhT58+8LlsHDmpvts7PT292ccZjUYcOHDAtE/Z5Y/NzMxEWSs7kOv1epSWlja52B3JT9j8A753717USlhoC0D6Fm5qaioYY+jWrRu6dOkiOhz18eG5Y8eA4mKhoViCRqORf4ju2DGlx97DQ5nBKiFKmhTtSpqOHDmCqVOnYt68eRg3bhyys7Oxfv16DBw4EF999RUWLVqEnTt3qh1ri/Ly8hASEnLF7fy23NzcZh9XVFQEvV7frscCwOuvvw5fX1/TJTw8vD3hiyV50tSnTx/4+vqiqqoKBw8eFB2OZUheSyH9yTooCIiMVK5Lut2I9ElT495eJyexsVhAcXExjh07BqChM8JRtStp6tSpE7755husWLEChw4dQv/+/XHdddchMDAQqampePbZZ6065llVVQVXV9crbndzczP9f0uPA9CuxwLAggULUFJSYrq0Ngxos/gX0YEDQGWl2FgsQKvVyn3CLitrmHUlaVIhfdIESN94kfozCDhEby8AREZGIigoSHA0YnUoswkJCYGXlxeqqqrAGENcXBwieYvJitzd3aHX66+4vbq62vT/LT0OQLseCyjJlo+PT5OL3QkLA0JClBlY9dNJZSP1CXvPHmWz14gI5e8oGYfZ60rypIn3Tpw4cQJFRUWCo7EA6u11GO1KmvR6Pf72t79h9OjR8Pb2RmpqKl555RV89dVXiI2NxZYtW9SOs1UhISHIy8u74nZ+W0v1Vf7+/nB1dW3XY6Wh0Uh/wpY6aZL8ZJ2dnY38/Hw4OTkhPj5edDiW0/gzKOFCrP7+/ujduzcAZbazVCoqAD70L+nnkJKmBu1KmuLi4rBkyRLMnz8fqampGDRoEJ577jns3r0bPj4+GDt2LJ544gm1Y201noyMjCsKsfkfmq9IezmtVouBAwc2u6FrSkoKIiMj4e3trXq8NkfypIm3co8dO4ZLly4JjkZlDrKoZWxsrNx7XcXHAy4uQEEBcPq06GgsQtrlP/buVWbPde2qXCTDGKOkqZF2D89t374dr776KpydnU23xcTEYPfu3Xjuuefw3nvvqRKgOaZPnw6DwYCPPvrIdJter8fSpUuRnJxsKtA+c+aMqZit8WNTU1ObJE7Hjx/Hhg0bcOutt1rnFxBN8qQpKCgIPXv2BNAwNi8FxqSvpXCYk7WrK8Abd5J+DqVd/kPyhktWVhYKCgrg7OzcYgeEI2lX0pSWltZiBb2TkxNefvllq7YmkpOTceutt2LBggWYP38+PvroI4wdOxZZWVl48803Tfe7++670Y9vUlvv4YcfRs+ePTF58mQsXrwYb7/9Nq699lp06dIFTz31lNV+B6EGD1aG6XJygGaGKmUg5RBdTg5w/rwyWychQXQ0FuEwSRMgfeOF/w13794t116QDtJwiYuLM02QcmTtSprMeeGsXX/wxRdf4PHHH8eXX36JefPmoba2FqtXr8bIkSNbfZy3tzc2bdqEkSNHYtGiRXjhhRcQGxuLzZs3O84sAW9vZXVwQPoTtlRDA/x3iYkBJBy6qq2txb59+wBQ0iSDmJgYuLq6oqioCCdPnhQdjnokryt0qIaLGaRZUMLNzQ2LFy/G4sWLW7zPpk2bmr09LCwM3333nYUisxPJyUoxY0oKcOONoqNRXeOhAcYYNBqN4IhUIHkL98CBA6iuroafn5+piFhq/EspLQ2oqVFqnCTi4uKChIQE7Ny5EykpKXL8Tc+eBc6dU1bi57srSIaSpqYcdwMZ0pTkrdzY2Fi4uLigsLAQp06dEh2OOhykhZuUlOQYe1317AkEBAB6PSDpdiPS9fjyz2B0NODpKTYWC6ipqXGs3l4zOMCZiJiFfyBSU5U1myTj6upqGjKW4oRdW9uwrpakJzP+d3KYk7VG07CliqSNF+lqCyXv7d2/fz/0ej38/f3Rq1cv0eHYBEqaiKJ/f8DLCygvB+o3R5WNVLN3DhwAqquBTp0AGYY5msH/TlIvank5/rvKkNg3g/8t9+/fb1pA2K45UG+vFCUNKqCkiSh0OmUWHUCtXHvAf4ekJEDCoatLly4hIyMDgIPtdSX5MHm3bt3QuXNn1NbWIi0tTXQ4HVNXp6zID0ifNDlMb68Z5DvbkvaT/ITNP/jp6enNbp1jVyRfG4avGt2zZ08EBgYKjsaKeIJ48iRQWCg2FgvQaDTyNF4OH1b26/TxAaKiREdjEZQ0XYmSJtJA8qSpR48eCAwMRE1Njf23ciWvpXDYk3WnTkCfPsp12bYbqSdN0sQbLomJUvb2FhUV4cSJEwAcrLf3KuT7S5P2419Qhw4ptU2S0Wg0ctQ1FRUB9UNXkPRk5rBJEyB940WaGXSSN1x4b2/v3r0REBAgOBrbQUkTaRAaCoSFKfso8ZlZkpGilct7IHr1UqaoS8bh97qSPGlKTEyERqNBVlYW8vPzRYfTfg5SBO6Qn8FWUNJEmpL8hC1FK1fyFu6pU6dQWFgIFxcXx9zrin8Gd+9W9heUjK+vr2k7K7ttvJSUNMwypt5eh0JJE2lK8qSJT509ffo0CgoKRIfTPpK3cHlCGxcXB1dXV8HRCBATA7i5Kc
[… remainder of base64-encoded PNG data omitted — cell output: plot of x(t) …]", "text/plain": [ "
" ] @@ -127,7 +130,8 @@ "t = np.linspace(0, 4*np.pi, 200)\n", "x = np.sin(t) + C * np.cos(t)\n", "x_dot = np.cos(t) - C * np.sin(t)\n", - "\n", + "x_dot_dot = -np.sin(t) - C * np.cos(t)\n", + "x_dot_dot_dot = -np.cos(t) + C * np.sin(t)\n", "max_axis_idx = x.ndim - 1\n", "\n", "plt.plot(t, x, color = 'k', label = 'x(t)')\n", @@ -153,7 +157,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "id": "a0250493", "metadata": {}, "outputs": [ @@ -161,23 +165,23 @@ "name": "stdout", "output_type": "stream", "text": [ - "setting builder with \n", - "setting builder with \n", - "trig_token_params: VALUES = (0, 0)\n", - "OrderedDict([('power', (1, 1)), ('dim', (0, 0))])\n" + "setting builder with \n", + "setting builder with \n", + "trig_token_params: VALUES = (0, 0)\n" ] } ], "source": [ "bnd = 20\n", - "n_epochs = 50\n", + "n_epochs = 10\n", "popsize = 8\n", "\n", - "epde_search_obj = epde.EpdeSearch(multiobjective_mode = True, boundary = bnd, \n", - " dimensionality = max_axis_idx, coordinate_tensors = [t,])\n", + "epde_search_obj = epde.EpdeSearch(use_solver=True, multiobjective_mode = True, boundary = bnd, \n", + " dimensionality = max_axis_idx, coordinate_tensors = [t,], \n", + " verbose_params = {'show_iter_idx' : True}, device='cuda') # False for brevity\n", "\n", - "trig_tokens = epde.TrigonometricTokens(freq = (0.95, 1.05), dimensionality=max_axis_idx)\n", - "grid_tokens = epde.GridTokens(['x_0',], dimensionality = max_axis_idx)\n", + "trig_tokens = epde.TrigonometricTokens(freq = (0.999, 1.001), dimensionality=max_axis_idx)\n", + "grid_tokens = epde.GridTokens(['x_0',], dimensionality = max_axis_idx, max_power = 2)\n", "\n", "epde_search_obj.set_moeadd_params(population_size = popsize, training_epochs=n_epochs)" ] @@ -202,7 +206,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "id": "0cb9d921", "metadata": {}, "outputs": [ @@ -210,522 +214,3123 @@ "name": "stdout", "output_type": "stream", "text": [ - "Deriv orders after definition [[None], [0]]\n", + "Deriv orders after definition [[0]]\n", + "200\n", + "initial_shape (200,) derivs_tensor.shape (200, 1)\n", + "Size of linked labels is 2\n", "initial_shape (200,) derivs_tensor.shape (200, 1)\n", - "self.tokens is ['u', 'du/dx0']\n", - "Here, derivs order is {'u': [None], 'du/dx0': [0]}\n", - "The cardinality of defined token pool is [2 2 1]\n", - "Among them, the pool contains [2 1]\n", - "Creating new equation, sparsity value [0.00013173]\n", + "Training NN to represent data for 10000.0 epochs\n", + "min loss is 0.5400543063879013, in last epoch: [0.915598452091217, 0.4044734239578247], \n", + "The cardinality of defined token pool is [1 1 2 1]\n", + "Among them, the pool contains [1 1 1]\n", + "self.vars_demand_equation {'u'}\n", + "Creating new equation, sparsity value [0.01975544]\n", "New solution accepted, confirmed 1/8 solutions.\n", - "Creating new equation, sparsity value [0.00041759]\n", + "Creating new equation, sparsity value [0.07179427]\n", "New solution accepted, confirmed 2/8 solutions.\n", - "Creating new equation, sparsity value [0.00025425]\n", - "Creating new equation, sparsity value [0.00017177]\n", + "Creating new equation, sparsity value [0.32091706]\n", "New solution accepted, confirmed 3/8 solutions.\n", - "Creating new equation, sparsity value [2.06723287e-06]\n", + "Creating new equation, sparsity value [0.01018523]\n", "New solution accepted, confirmed 4/8 solutions.\n", - "Creating new equation, sparsity value [0.00086434]\n", + "Creating new equation, 
sparsity value [0.01619061]\n", "New solution accepted, confirmed 5/8 solutions.\n", - "Creating new equation, sparsity value [4.96260272e-05]\n", + "Creating new equation, sparsity value [0.02050472]\n", "New solution accepted, confirmed 6/8 solutions.\n", - "Creating new equation, sparsity value [3.79898092e-05]\n", + "Creating new equation, sparsity value [0.05710126]\n", "New solution accepted, confirmed 7/8 solutions.\n", - "Creating new equation, sparsity value [1.04795544e-05]\n", + "Creating new equation, sparsity value [0.01763084]\n", "New solution accepted, confirmed 8/8 solutions.\n", "best_obj 2\n", "Multiobjective optimization : 0-th epoch.\n", "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", + "solving equation:\n", + "-0.17602090401249287 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + -0.0010072008321658568 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -32.12186859655273 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0005379153129839, dim: 0.0} + 32.08082516437511 * u{power: 1.0} * cos{power: 1.0, freq: 1.0004010719364531, dim: 0.0} + -41.603907101826096 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.019755437998256384}}\n", + "Step = 1000 loss = 0.017485.\n", + "Step = 2000 loss = 0.012757.\n", + "Step = 3000 loss = 0.009706.\n", + "Step = 4000 loss = 0.008226.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.7340916483480605, while loss addition is 0.008226213045418262\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.9990721316770192, dim: 0.0} + 0.0 * u{power: 1.0} + 0.04532096104047007 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9992526142518535, dim: 0.0} + 0.012210825568783591 * x_0{power: 2.0, dim: 0.0} + -0.15782559519670383 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.4724520702143913 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0717942739964566}}\n", + "Step = 1000 loss = 0.171465.\n", + "Step = 2000 loss = 0.069246.\n", + "Step = 3000 loss = 0.027097.\n", + "Step = 4000 loss = 0.009575.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.527029631006959, while loss addition is 0.009574532508850098\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0005065084577194, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.13119788265965474 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0009795938970176, dim: 0.0} + -0.019801237553983466 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.32091706162489664}}\n", + "Step = 1000 loss = 0.175007.\n", + "Step = 2000 loss = 
0.076612.\n", + "Step = 3000 loss = 0.049392.\n", + "Step = 4000 loss = 0.032936.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 13.207617781712534, while loss addition is 0.0329357348382473\n", + "solving equation:\n", + "-0.07454463701533846 * u{power: 1.0} + 0.0012282283017115646 * x_0{power: 2.0, dim: 0.0} + -1.322105260909465 * u{power: 1.0} * sin{power: 1.0, freq: 1.0001669537505464, dim: 0.0} + 0.01281680256006059 * du/dx0{power: 1.0} + 0.0004122071784346405 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.5894912298872101 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.010185231599862609}}\n", + "Step = 1000 loss = 0.081030.\n", + "Step = 2000 loss = 0.071747.\n", + "Step = 3000 loss = 0.068017.\n", + "Step = 4000 loss = 0.065847.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 3.240416875096804, while loss addition is 0.06584709882736206\n", + "solving equation:\n", + "0.010250141085065315 * du/dx0{power: 1.0} + -0.07019029841657627 * u{power: 1.0} + 0.0003196309313673873 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0010705032388617641 * x_0{power: 2.0, dim: 0.0} + -0.7103312302020051 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0002326282943674, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.01619060556873039}}\n", + "Step = 1000 loss = 0.070828.\n", + "Step = 2000 loss = 0.046773.\n", + "Step = 3000 loss = 0.037850.\n", + "Step = 4000 loss = 0.033046.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 23.51355680966454, while loss addition is 0.03304643556475639\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.7744432540440178 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -2.06393056954344 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0008198530503918, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0007758176456265, dim: 0.0} + -0.03941800390138538 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.020504719365927394}}\n", + "Step = 1000 loss = 0.810500.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.14634543850368917, while loss addition is 0.8097173571586609\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.31640859856123005 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0006347583405688, dim: 0.0} + -0.12037243090915703 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + -0.13474451501236487 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.26458266602232533 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 
'value': 0.05710125813632406}}\n", + "Step = 1000 loss = 0.095339.\n", + "Step = 2000 loss = 0.021955.\n", + "Step = 3000 loss = 0.010892.\n", + "Step = 4000 loss = 0.006736.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 6.240090768545118, while loss addition is 0.006735977251082659\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 2.60546768915308 * du/dx0{power: 1.0} + -0.1636312295567348 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.999051783089883, dim: 0.0} + -0.054048671646233386 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.06814151930571019 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.24343956867875985 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.017630844780571402}}\n", + "Step = 1000 loss = 0.071007.\n", + "Step = 2000 loss = 0.032380.\n", + "Step = 3000 loss = 0.019562.\n", + "Step = 4000 loss = 0.014047.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 2.6384443510437996, while loss addition is 0.01404670998454094\n", + "solving equation:\n", + "0.11855139044567758 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990489972929576, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.9992729962278806, dim: 0.0} + 0.19116097077533303 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0324776240219171}}\n", + "Step = 1000 loss = 0.562931.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 19.09995421773746, while loss addition is 0.5604084730148315\n", + "solving equation:\n", + "0.13914805910330846 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.30478085842133334}}\n", + "Step = 1000 loss = 0.055250.\n", + "Step = 2000 loss = 0.026317.\n", + "Step = 3000 loss = 0.014251.\n", + "Step = 4000 loss = 0.008811.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9995434682122984, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0036408053760711217 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.12000070157128279 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.5728925116560653 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0007758176456265, dim: 0.0} + -0.040111367297900664 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': 
{'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02001922824058052}}\n", + "Step = 1000 loss = 0.565440.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 8.267426314307237, while loss addition is 0.5667074918746948\n", + "solving equation:\n", + "0.47133316922146884 * u{power: 1.0} * du/dx0{power: 1.0} + 8.29659404046781e-05 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -0.0013471963148231775 * x_0{power: 2.0, dim: 0.0} + -0.0007838413382276667 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.5528943308701321 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9994575508680793, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.019298634991474953}}\n", + "Step = 1000 loss = 0.036527.\n", + "Step = 2000 loss = 0.032538.\n", + "Step = 3000 loss = 0.030871.\n", + "Step = 4000 loss = 0.029861.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 2.846542649478238, while loss addition is 0.029861457645893097\n", + "During MO : processing 1-th weight.\n", + "solving equation:\n", + "-0.03991313011053719 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0003343111437257, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.0003743141320481731 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 1.3270188077145784 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9992169433907744, dim: 0.0} + -0.6746839974687904 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.019916637338925396}}\n", + "Step = 1000 loss = 0.098843.\n", + "Step = 2000 loss = 0.089998.\n", + "Step = 3000 loss = 0.086390.\n", + "Step = 4000 loss = 0.083760.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.2339489226193985, while loss addition is 0.08376005291938782\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.4813078594494527 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.4951011828773482 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9992416797352368, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06905370474260752}}\n", + "Step = 1000 loss = 0.034449.\n", + "Step = 2000 loss = 0.031363.\n", + "Step = 3000 loss = 0.029932.\n", + "Step = 4000 loss = 0.028999.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 2.7847731344713273, while loss addition is 0.028999006375670433\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995079508447512, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 43.27455603156956 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + -2.4512789549954794 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 
'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.29115940092156456}}\n", + "Step = 1000 loss = 0.809047.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 26.497484374145422, while loss addition is 0.7600930333137512\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 42.69519106074836 * du/dx0{power: 1.0} + -9.231061297838927 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0000801201331042, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0001669537505464, dim: 0.0} + 1.9880389243817795 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02337022480715453}}\n", + "Step = 1000 loss = 0.818163.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 26.488203108597304, while loss addition is 0.7279964685440063\n", + "During MO : processing 2-th weight.\n", + "solving equation:\n", + "-0.03288002254011457 * du/dx0{power: 1.0} + 2.6765603289168887 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9998016639246733, dim: 0.0} + -1.295338916551178 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.000565990314849, dim: 0.0} + 0.005380734594229823 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03383942385037123}}\n", + "Step = 1000 loss = 1.049083.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.15246912095847154, while loss addition is 1.0473054647445679\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0009573249925376, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.13914805910330846 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9992859573044329, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2903448735550057}}\n", + "Step = 1000 loss = 0.055250.\n", + "Step = 2000 loss = 0.026317.\n", + "Step = 3000 loss = 0.014251.\n", + "Step = 4000 loss = 0.008811.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + -0.5432756454718325 * x_0{power: 1.0, dim: 0.0} + -0.061049227669371264 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 6.06195304739007 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0002326282943674, dim: 0.0} + 3.3624975968931095 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 
'u'): {'optimizable': True, 'value': 0.01987573700078829}}\n", + "Step = 1000 loss = 0.514909.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 6.024341979878981, while loss addition is 0.5149120092391968\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0000576640632381, dim: 0.0} + -0.07184560629474246 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.029634248850579338 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07107535880509416}}\n", + "Step = 1000 loss = 0.124359.\n", + "Step = 2000 loss = 0.048326.\n", + "Step = 3000 loss = 0.032750.\n", + "Step = 4000 loss = 0.025898.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 8.86937699819532, while loss addition is 0.02589772269129753\n", + "During MO : processing 3-th weight.\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 0.9922971139262374 * u{power: 1.0} * cos{power: 1.0, freq: 1.0003061231999173, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9992859573044329, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + -1.2878436377510172 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000358547080109, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.276150962903109}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07275859964031862, while loss addition is 9.393150321557187e-06\n", + "solving equation:\n", + "-6.428678691709879 * du/dx0{power: 1.0} + -0.5390279970340588 * x_0{power: 2.0, dim: 0.0} + -10.55340656477214 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9994575508680793, dim: 0.0} + 20.50161498083303 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001917780120828, dim: 0.0} + -1.3066488762584305 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 22.83550978515317 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.034455996524072864}}\n", + "Step = 1000 loss = 0.735739.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 32.51944752342019, while loss addition is 0.789408266544342\n", + "solving equation:\n", + "0.008058914706625972 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.38025879135727747 * u{power: 1.0} + -0.09283492802268256 * u{power: 1.0} * sin{power: 1.0, freq: 0.9990721316770192, dim: 0.0} + 0.1524500828206087 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.1588642653112029 * x_0{power: 1.0, dim: 0.0} + -0.9331382047397477 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.020320438030929453}}\n", + "Step = 1000 loss = 0.054542.\n", + "Step = 2000 
loss = 0.019312.\n", + "Step = 3000 loss = 0.010514.\n", + "Step = 4000 loss = 0.007467.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 7.84672348725327, while loss addition is 0.007466577924787998\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.3782039590824927 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9992526142518535, dim: 0.0} + -0.6460037863793525 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0005129710997187, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06910195791996865}}\n", + "Step = 1000 loss = 0.056752.\n", + "Step = 2000 loss = 0.049108.\n", + "Step = 3000 loss = 0.046140.\n", + "Step = 4000 loss = 0.044147.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 3.3179767161316387, while loss addition is 0.044146619737148285\n", + "During MO : processing 4-th weight.\n", + "solving equation:\n", + "-0.0270949801935484 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.016534669226523385 * x_0{power: 1.0, dim: 0.0} + 0.4776874124202909 * u{power: 1.0} * du/dx0{power: 1.0} + 0.03024715908939432 * u{power: 1.0} + 0.6026242907570387 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9998011298290552, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.018119873909048904}}\n", + "Step = 1000 loss = 0.040974.\n", + "Step = 2000 loss = 0.037646.\n", + "Step = 3000 loss = 0.036297.\n", + "Step = 4000 loss = 0.035443.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 2.775371317581703, while loss addition is 0.03544331341981888\n", + "solving equation:\n", + "-1.3186124382629616 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002567273647396, dim: 0.0} + 0.017833064156978218 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.05806676878529962 * u{power: 1.0} + 0.5293862879098774 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02088362623861933}}\n", + "Step = 1000 loss = 0.079463.\n", + "Step = 2000 loss = 0.070444.\n", + "Step = 3000 loss = 0.066776.\n", + "Step = 4000 loss = 0.064555.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 3.2290724275730023, while loss addition is 0.06455471366643906\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.001257683938292182 * du/dx0{power: 1.0} + -0.013729805586716963 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 1.0174195867237064 * u{power: 1.0} * cos{power: 1.0, freq: 0.9997062236962024, dim: 0.0} + -0.6821158444358169 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': 
[0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.022058369807432587}}\n", + "Step = 1000 loss = 0.129630.\n", + "Step = 2000 loss = 0.110121.\n", + "Step = 3000 loss = 0.105938.\n", + "Step = 4000 loss = 0.103642.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 5.028658208660333, while loss addition is 0.10364190489053726\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + -0.001524787851165534 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.001982869534807724 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0006347583405688, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.048549256738547486 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.054196922366743214}}\n", + "Step = 1000 loss = 0.118972.\n", + "Step = 2000 loss = 0.044129.\n", + "Step = 3000 loss = 0.028763.\n", + "Step = 4000 loss = 0.020835.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 9.344415517925551, while loss addition is 0.020835036411881447\n", + "During MO : processing 5-th weight.\n", + "solving equation:\n", + "1.002051501341004 * u{power: 1.0} * du/dx0{power: 1.0} + -2.8075848006708566 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0000157250707808, dim: 0.0} + -1.4876816322407154 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002567273647396, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 1.4908986840846479 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991152478537982, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.020086644161538856}}\n", + "Step = 1000 loss = 0.000463.\n", + "Step = 2000 loss = 0.000244.\n", + "Step = 3000 loss = 0.000145.\n", + "Step = 4000 loss = 0.000093.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.6686674250458036, while loss addition is 9.315699571743608e-05\n", + "solving equation:\n", + "-0.003161521757896412 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9995964822558295, dim: 0.0} + 0.0 * u{power: 1.0} + -0.0014257562256783556 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.037649971672052994 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.17927959412412428 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.018404032271181536}}\n", + "Step = 1000 loss = 0.153251.\n", + "Step = 2000 loss = 0.083416.\n", + "Step = 3000 loss = 0.066435.\n", + "Step = 4000 loss = 0.061798.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 9.226463246277339, while loss addition is 0.06179799884557724\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 1.3168215510922103 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0006348639119964, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 
[Output cell condensed for readability. This hunk replaces the terse progress lines of the old output ("During MO : processing N-th weight.") with verbose per-candidate logs across the logged multiobjective-optimization epochs (tail of one epoch through "Multiobjective optimization : 1-th epoch", "2-th epoch", and the start of "3-th epoch"). For every processed weight the regenerated output now prints:
  - the candidate equation in EPDE's text form, e.g.
    "0.9919943088768896 * u{power: 1.0} * cos{power: 1.0, freq: 1.000401404311892, dim: 0.0} + -1.2901103502375597 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9994055387640451, dim: 0.0}"
  - its metaparameters, e.g. {'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': <candidate-specific>}}
  - solver losses logged every 1000 steps ("Step = 1000 loss = ...", up to step 4000)
  - the solution shape, in this run always "solution shape (200, 1)" with "solution[..., eq_idx] (200,), eq_idx 0"
  - the resulting fitness error and loss addition, e.g. "fitness error is 0.07262328592426327, while loss addition is 9.92585682979552e-06"
Across the logged epochs the strongest candidates (fitness errors roughly 0.04-0.22, versus roughly 3-34 for spurious structures) repeatedly converge to the same form, c1 * u{power: 1.0} * cos{...} + c0 = du/dx0{power: 1.0} * sin{...} with c1 near 0.992 and c0 near -1.29.]
cos{power: 1.0, freq: 0.9991242357352422, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.050634101789147935}}\n", + "Step = 1000 loss = 0.075602.\n", + "Step = 2000 loss = 0.052859.\n", + "Step = 3000 loss = 0.041810.\n", + "Step = 4000 loss = 0.035050.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.764892365535427, while loss addition is 0.035050246864557266\n", + "During MO : processing 2-th weight.\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002625856085443, dim: 0.0} + 6.062631430332547 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007191002827536, dim: 0.0} + 0.009422619057554928 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06086066917827878}}\n", + "Step = 1000 loss = 0.539814.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 30.945575860492436, while loss addition is 0.5324665307998657\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 4.470857075438188 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0} + 0.1389788041995263 * x_0{power: 2.0, dim: 0.0} + 7.243570449024178 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 0.9996656615609463, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0008304248962907, dim: 0.0} + -5.6196699955410425 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.024658545683197364}}\n", + "Step = 1000 loss = 0.655285.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 56.15341449353809, while loss addition is 0.6557793021202087\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0006990130607514, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991875404498936, dim: 0.0} + 0.13914805910330846 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03602324214846455}}\n", + "Step = 1000 loss = 0.055250.\n", + "Step = 2000 loss = 0.026317.\n", + "Step = 3000 loss = 0.014251.\n", + "Step = 4000 loss = 0.008811.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.9921831537397393 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -1.2900357650392855 
= du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.028087402482029625}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07223108141941993, while loss addition is 7.925616955617443e-06\n", + "During MO : processing 3-th weight.\n", + "solving equation:\n", + "-0.0054064352730481295 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.023656529860846057 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.009634642391786156 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.17431785385974208 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.027711368863671197}}\n", + "Step = 1000 loss = 0.257182.\n", + "Step = 2000 loss = 0.205134.\n", + "Step = 3000 loss = 0.201765.\n", + "Step = 4000 loss = 0.200280.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 7.94318993241792, while loss addition is 0.20028044283390045\n", + "solving equation:\n", + "0.9977377451171527 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -1.2911615879196259 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0003103252570729, dim: 0.0} + 0.00014205591961689856 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0009888895166625, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.021127596115868678}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.082612532684748, while loss addition is 9.784803296497557e-06\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 1.3198959138960134 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0000808801683057, dim: 0.0} + -0.6656742935423294 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07416760470883565}}\n", + "Step = 1000 loss = 0.096154.\n", + "Step = 2000 loss = 0.086781.\n", + "Step = 3000 loss = 0.083418.\n", + "Step = 4000 loss = 0.080575.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.197636662821732, while loss addition is 0.08057541400194168\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000358547080109, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.000102380752889, dim: 0.0} + 43.27455603156956 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + -2.4512789549954794 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': 
{'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.25747779708024204}}\n", + "Step = 1000 loss = 0.809124.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 26.48674323713541, while loss addition is 0.7605380415916443\n", + "During MO : processing 4-th weight.\n", + "solving equation:\n", + "0.12116477635736683 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000358547080109, dim: 0.0} + 0.3857717710685837 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.000000458074414, dim: 0.0} + -0.8413365690813501 * u{power: 1.0} + -0.007523136775206829 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.8024332422550079 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04102517345519972}}\n", + "Step = 1000 loss = 0.037902.\n", + "Step = 2000 loss = 0.013101.\n", + "Step = 3000 loss = 0.004218.\n", + "Step = 4000 loss = 0.001664.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 2.671189021324703, while loss addition is 0.001663770992308855\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.9922857653300342 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -1.289057161738866 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0003817035855842, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.25462996865123033}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07293032489743992, while loss addition is 9.50909816310741e-06\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -0.05801998989240054 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9992199636461893, dim: 0.0} + -0.6619845167895883 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0008298344709265, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07384526415939292}}\n", + "Step = 1000 loss = 0.070131.\n", + "Step = 2000 loss = 0.047341.\n", + "Step = 3000 loss = 0.037821.\n", + "Step = 4000 loss = 0.032493.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 24.103515469998452, while loss addition is 0.032492995262145996\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.13914805910330846 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9991969109910004, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): 
{'optimizable': True, 'value': 0.06445316548694027}}\n", + "Step = 1000 loss = 0.055250.\n", + "Step = 2000 loss = 0.026317.\n", + "Step = 3000 loss = 0.014251.\n", + "Step = 4000 loss = 0.008811.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n", + "During MO : processing 5-th weight.\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.000515810801695, dim: 0.0} + -0.012131377100072694 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.002473793043966798 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9996539518175757, dim: 0.0} + 0.5903967781529361 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9992199636461893, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06943565666294439}}\n", + "Step = 1000 loss = 0.081094.\n", + "Step = 2000 loss = 0.056633.\n", + "Step = 3000 loss = 0.044535.\n", + "Step = 4000 loss = 0.037136.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 17.289902661922884, while loss addition is 0.03713631629943848\n", + "solving equation:\n", + "-2.1441123454378035 * u{power: 1.0} + -10.514787203111508 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.000069263250753, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0006546354031438, dim: 0.0} + -0.19100332431025002 * x_0{power: 2.0, dim: 0.0} + 42.24328571565595 * du/dx0{power: 1.0} + 11.041897366024997 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023599135764991324}}\n", + "Step = 1000 loss = 0.628229.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 18.519771792717123, while loss addition is 0.8305673599243164\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.000550551461995, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000358547080109, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.13914805910330846 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.25324385136278527}}\n", + "Step = 1000 loss = 0.055250.\n", + "Step = 2000 loss = 0.026317.\n", + "Step = 3000 loss = 0.014251.\n", + "Step = 4000 loss = 0.008811.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n", + "solving equation:\n", + "0.9920226719508357 * u{power: 1.0} * cos{power: 1.0, freq: 1.0006852150290508, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0006817353356554, dim: 0.0} + -1.2889032175877482 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995018374407709, dim: 0.0}\n", 
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.048851643805684845}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07267502030327942, while loss addition is 9.965779099729843e-06\n", + "During MO : processing 6-th weight.\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997905441805, dim: 0.0} + 0.6200382221300772 * u{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0006852150290508, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.2559871653360017 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0000488437290826, dim: 0.0} + -0.0289849266224357 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.050854169055007316}}\n", + "Step = 1000 loss = 0.124797.\n", + "Step = 2000 loss = 0.108930.\n", + "Step = 3000 loss = 0.106742.\n", + "Step = 4000 loss = 0.106394.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 6.30107033792241, while loss addition is 0.10639376193284988\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000358547080109, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2542206008950524}}\n", + "Step = 1000 loss = 0.162376.\n", + "Step = 2000 loss = 0.055818.\n", + "Step = 3000 loss = 0.020853.\n", + "Step = 4000 loss = 0.010093.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 21.374612019249557, while loss addition is 0.010092579759657383\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0} + -0.02728770936640827 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + -0.24977366769642234 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9996945249238635, dim: 0.0} + 0.0006283557031551978 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023863691572277787}}\n", + "Step = 1000 loss = 0.270807.\n", + "Step = 2000 loss = 0.063572.\n", + "Step = 3000 loss = 0.020214.\n", + "Step = 4000 loss = 0.008866.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.51189526612541, while loss addition is 0.008865890093147755\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9999674968916878, dim: 0.0} + 0.9923485088507161 * u{power: 1.0} * cos{power: 1.0, freq: 1.0002627964350315, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -1.2874866899687225 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0005407207914359, dim: 0.0}\n", + "{'terms_number': {'optimizable': 
False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0701397936907243}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07315698941382023, while loss addition is 9.720480193209369e-06\n", + "During MO : processing 7-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9994145617876964, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9999290914004724, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2566315340455224}}\n", + "Step = 1000 loss = 0.163182.\n", + "Step = 2000 loss = 0.056044.\n", + "Step = 3000 loss = 0.020875.\n", + "Step = 4000 loss = 0.010074.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 21.46413143158294, while loss addition is 0.010074407793581486\n", + "solving equation:\n", + "-0.009576530142226156 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0} + 0.0 * u{power: 1.0} + -0.8423289664014684 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.8193340279456833 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.05025009727789239}}\n", + "Step = 1000 loss = 0.599104.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 38.460109535600665, while loss addition is 0.5994678735733032\n", + "solving equation:\n", + "0.9995494873662483 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0} + 0.0 * u{power: 1.0} + 1.3027531678665083 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9999318646418973, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 4.565032184089546e-05 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02333334643362534}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.08205866226886922, while loss addition is 9.620147466193885e-06\n", + "solving equation:\n", + "0.01252429022746852 * du/dx0{power: 1.0} + 0.0199256362463629 * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.7523966702143492 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -2.0428485205315603 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.999452335008158, dim: 0.0} + 0.003059472611424724 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07229578777542711}}\n", + "Step = 1000 loss = 0.012931.\n", + "Step = 
2000 loss = 0.005610.\n", + "Step = 3000 loss = 0.003201.\n", + "Step = 4000 loss = 0.002080.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.29569593923089355, while loss addition is 0.002080324338749051\n", "Multiobjective optimization : 4-th epoch.\n", "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0008298344709265, dim: 0.0} + -0.022493158689664637 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + -0.06213820311889662 * du/dx0{power: 1.0} + 0.6188616411584826 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9992199636461893, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07178450298149033}}\n", + "Step = 1000 loss = 0.075505.\n", + "Step = 2000 loss = 0.053583.\n", + "Step = 3000 loss = 0.042929.\n", + "Step = 4000 loss = 0.036141.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 17.737630249729367, while loss addition is 0.036140892654657364\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9999094094567123, dim: 0.0} + -0.057713375436893664 * u{power: 1.0} + 0.01681649900978245 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -0.7713590955887905 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.024640007735168147}}\n", + "Step = 1000 loss = 0.071959.\n", + "Step = 2000 loss = 0.045605.\n", + "Step = 3000 loss = 0.035655.\n", + "Step = 4000 loss = 0.030506.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 25.20554690097784, while loss addition is 0.03050628863275051\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.24898132277358337 * u{power: 1.0} * sin{power: 1.0, freq: 1.0000659856480183, dim: 0.0} + -0.2433694542235977 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04793930113571718}}\n", + "Step = 1000 loss = 0.242553.\n", + "Step = 2000 loss = 0.075334.\n", + "Step = 3000 loss = 0.021455.\n", + "Step = 4000 loss = 0.006924.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 14.262292779446835, while loss addition is 0.006924120243638754\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000358547080109, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 
1.0} * cos{power: 1.0, freq: 1.000550551461995, dim: 0.0} + 2.6221910025714847 * du/dx0{power: 1.0} + 0.07950261639313408 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.20430570948912352 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2534709568490664}}\n", + "Step = 1000 loss = 0.182823.\n", + "Step = 2000 loss = 0.088607.\n", + "Step = 3000 loss = 0.049511.\n", + "Step = 4000 loss = 0.035781.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 15.063926167008288, while loss addition is 0.03578059747815132\n", + "During MO : processing 1-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9999984770015918, dim: 0.0} + -0.057658136277756865 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.017247866613316745 * x_0{power: 1.0, dim: 0.0} + -0.7699124762015056 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0009617298669684, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.030819373165370922}}\n", + "Step = 1000 loss = 0.071953.\n", + "Step = 2000 loss = 0.045638.\n", + "Step = 3000 loss = 0.035685.\n", + "Step = 4000 loss = 0.030526.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 25.21351289713944, while loss addition is 0.030525853857398033\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0008298344709265, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.03758573415628685 * x_0{power: 2.0, dim: 0.0} + 1.0348556677330305 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07138744444115136}}\n", + "Step = 1000 loss = 0.600652.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 30.32235641246107, while loss addition is 0.6033748984336853\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991875404498936, dim: 0.0} + 0.0 * u{power: 1.0} + 1.042350895428217 * u{power: 1.0} * cos{power: 1.0, freq: 1.0006990130607514, dim: 0.0} + -0.02902362952168401 * x_0{power: 1.0, dim: 0.0} + 0.01764208430436145 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9994089980797222, dim: 0.0} + -0.5024118596615111 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0383627997301006}}\n", + "Step = 1000 loss = 0.152664.\n", + "Step = 2000 loss = 0.132422.\n", + "Step = 3000 loss = 0.126325.\n", + "Step = 4000 loss = 0.122990.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 5.437226691463133, while loss addition is 0.12299007177352905\n", + "solving equation:\n", + "0.9962628651289044 * du/dx0{power: 
1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -1.2959245631172214 * u{power: 1.0} * sin{power: 1.0, freq: 0.9995357690649379, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9992526142518535, dim: 0.0} + 1.290831065109276 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04858590179280345}}\n", + "Step = 1000 loss = 0.000273.\n", + "Step = 2000 loss = 0.000141.\n", + "Step = 3000 loss = 0.000164.\n", + "Step = 4000 loss = 0.000141.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.16680193241857866, while loss addition is 0.00014092540368437767\n", + "During MO : processing 2-th weight.\n", + "solving equation:\n", + "0.2471121914690245 * u{power: 1.0} * sin{power: 1.0, freq: 0.9993022924696956, dim: 0.0} + -0.0008335451089696785 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.25060164384901745 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.026905066037575125}}\n", + "Step = 1000 loss = 0.257413.\n", + "Step = 2000 loss = 0.080749.\n", + "Step = 3000 loss = 0.022990.\n", + "Step = 4000 loss = 0.007329.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 13.610667062969483, while loss addition is 0.007328555453568697\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0} + 0.0075081775387082226 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 0.11619699091961168 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.23361177160542398 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.019473438822491553}}\n", + "Step = 1000 loss = 0.555355.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 20.73227096099069, while loss addition is 0.5549392104148865\n", + "solving equation:\n", + "0.0207884626454103 * du/dx0{power: 1.0} + 0.033242129062061476 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -2.0361092029990773 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + 0.7452013693754217 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.005096935084467041 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02251582754782313}}\n", + "Step = 1000 loss = 0.013000.\n", + "Step = 2000 loss = 0.005756.\n", + "Step = 3000 loss = 0.003280.\n", + "Step = 4000 loss = 0.002141.\n", + 
"solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.3027882255441003, while loss addition is 0.002140830969437957\n", + "solving equation:\n", + "0.07706791444825184 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0008298344709265, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.14170350745076138 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0000842880167873, dim: 0.0} + -0.4962195386235582 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07184531558925104}}\n", + "Step = 1000 loss = 0.168120.\n", + "Step = 2000 loss = 0.067772.\n", + "Step = 3000 loss = 0.035033.\n", + "Step = 4000 loss = 0.020348.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 9.669736819573373, while loss addition is 0.02034788206219673\n", + "During MO : processing 3-th weight.\n", + "solving equation:\n", + "-0.0028646455767800836 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991875404498936, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.01620544235825594 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0003280084055381, dim: 0.0} + 0.6064864887527163 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9996114579905128, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03830638493764778}}\n", + "Step = 1000 loss = 0.082314.\n", + "Step = 2000 loss = 0.057605.\n", + "Step = 3000 loss = 0.045665.\n", + "Step = 4000 loss = 0.038303.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 14.505727834325677, while loss addition is 0.03830307722091675\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 7.907008006881454 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -1.9813628461059163 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07113889427568337}}\n", + "Step = 1000 loss = 0.756288.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 36.99870838055233, while loss addition is 0.7521856427192688\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0009074689671351, dim: 0.0} + -0.05769495854118309 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.016961055601504973 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.7708637393373213 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.027140197316849444}}\n", + "Step = 1000 loss = 0.071953.\n", + "Step = 2000 loss = 0.045621.\n", + 
"Step = 3000 loss = 0.035674.\n", + "Step = 4000 loss = 0.030556.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 25.197097352123773, while loss addition is 0.030556434765458107\n", + "solving equation:\n", + "-0.001739896923914301 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0009875818421683, dim: 0.0} + 0.0021872803942656373 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0000952143664463, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.000410885210491, dim: 0.0} + 0.47779966696047454 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0007558729530357, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.047094551180908724}}\n", + "Step = 1000 loss = 0.076996.\n", + "Step = 2000 loss = 0.054555.\n", + "Step = 3000 loss = 0.044110.\n", + "Step = 4000 loss = 0.037405.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 14.281888168087045, while loss addition is 0.03740539774298668\n", + "During MO : processing 4-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9993654780169857, dim: 0.0} + 0.9966913849772441 * u{power: 1.0} * cos{power: 1.0, freq: 1.0006990130607514, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -1.2850839444983548 * u{power: 1.0} * sin{power: 1.0, freq: 1.0005275519388912, dim: 0.0} + -0.00016528437996643497 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.036205765314100004}}\n", + "Step = 1000 loss = 0.000262.\n", + "Step = 2000 loss = 0.000149.\n", + "Step = 3000 loss = 0.000095.\n", + "Step = 4000 loss = 0.000064.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.13035571331375528, while loss addition is 6.429143832065165e-05\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.9921831537397393 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -1.2900357650392855 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.029842994513261743}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07223108141941993, while loss addition is 7.925616955617443e-06\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0008298344709265, dim: 0.0} + 0.0 * u{power: 1.0} + 0.14308520138589667 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.07245886164600983 * x_0{power: 1.0, dim: 0.0} + -0.4720946128539159 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): 
{'optimizable': True, 'value': 0.0704723602421285}}\n", + "Step = 1000 loss = 0.101082.\n", + "Step = 2000 loss = 0.047780.\n", + "Step = 3000 loss = 0.023287.\n", + "Step = 4000 loss = 0.011436.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.427309375572065, while loss addition is 0.011435629799962044\n", + "solving equation:\n", + "1.0327475325386986 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991152478537982, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.0026905747561516615 * x_0{power: 2.0, dim: 0.0} + 0.003844215278855854 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + -0.07489510864333196 * du/dx0{power: 1.0} + 0.7738256074760775 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023607846449418633}}\n", + "Step = 1000 loss = 0.154746.\n", + "Step = 2000 loss = 0.132088.\n", + "Step = 3000 loss = 0.129482.\n", + "Step = 4000 loss = 0.128432.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.932629656491429, while loss addition is 0.12843188643455505\n", + "During MO : processing 5-th weight.\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.014891658927305373 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9991069117710083, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + -0.0050128837681541714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0627582217441392}}\n", + "Step = 1000 loss = 0.115687.\n", + "Step = 2000 loss = 0.033332.\n", + "Step = 3000 loss = 0.010100.\n", + "Step = 4000 loss = 0.004973.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 14.571401701965046, while loss addition is 0.004972618073225021\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.47784010827790824 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0012409667989641102 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.000738296114285, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.000857465812002, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.4950273450060168 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9991148208397588, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03638143465577531}}\n", + "Step = 1000 loss = 0.032665.\n", + "Step = 2000 loss = 0.029714.\n", + "Step = 3000 loss = 0.028389.\n", + "Step = 4000 loss = 0.027460.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 2.783140342016797, while loss addition is 0.027459869161248207\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.000565226817999, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.9924239475188741 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002567273647396, dim: 0.0} + 
0.9905200703559413 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.99929325416395, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.034804472141388834}}\n", + "Step = 1000 loss = 0.000155.\n", + "Step = 2000 loss = 0.000075.\n", + "Step = 3000 loss = 0.000043.\n", + "Step = 4000 loss = 0.000028.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.8923809274163459, while loss addition is 2.803864117595367e-05\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + -1.28665683200422 * u{power: 1.0} * sin{power: 1.0, freq: 0.999025939783322, dim: 0.0} + 0.0 * u{power: 1.0} + 0.9841619568719587 * u{power: 1.0} * cos{power: 1.0, freq: 1.000550551461995, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.00013305309338607074 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.25295007375298645}}\n", + "Step = 1000 loss = 0.000271.\n", + "Step = 2000 loss = 0.000157.\n", + "Step = 3000 loss = 0.000098.\n", + "Step = 4000 loss = 0.000065.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.11829981866416783, while loss addition is 6.479575677076355e-05\n", + "During MO : processing 6-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.999466739182568, dim: 0.0} + 0.9918958593616103 * u{power: 1.0} * cos{power: 1.0, freq: 0.9998112629439612, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0004263416530421, dim: 0.0} + -1.2928468056716844 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991152478537982, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0718274931680753}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07352435240919084, while loss addition is 9.072919965547044e-06\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.7657107129631627 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000289633492301, dim: 0.0} + 0.770445182304322 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + -0.0001487644052013959 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9992199636461893, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0261215134702725}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.0952403921106568, while loss addition is 9.188472176901996e-06\n", + "solving equation:\n", + "0.3782677713712927 * u{power: 1.0} * du/dx0{power: 1.0} + -0.0041898269931806755 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.005383260935042669 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * 
cos{power: 1.0, freq: 0.9995379251488964, dim: 0.0} + -0.6492138459677277 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.021478561141997463}}\n", + "Step = 1000 loss = 0.056428.\n", + "Step = 2000 loss = 0.048474.\n", + "Step = 3000 loss = 0.045419.\n", + "Step = 4000 loss = 0.043210.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 3.2414363159591515, while loss addition is 0.04321034625172615\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.9918932006344064 * u{power: 1.0} * cos{power: 1.0, freq: 1.000767761195601, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.9992900102911381, dim: 0.0} + 0.0 * u{power: 1.0} + -1.2895697732389209 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991482550016808, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.032116834612249436}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07446776925823148, while loss addition is 9.48059641814325e-06\n", + "During MO : processing 7-th weight.\n", + "solving equation:\n", + "0.14308520138589667 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0006360414542053, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.07245886164600983 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.4720946128539159 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07586800856651196}}\n", + "Step = 1000 loss = 0.101082.\n", + "Step = 2000 loss = 0.047780.\n", + "Step = 3000 loss = 0.023287.\n", + "Step = 4000 loss = 0.011436.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.427309375572065, while loss addition is 0.011435629799962044\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002625856085443, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.25671887173520347}}\n", + "Step = 1000 loss = 0.592692.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 11.444680996907316, while loss addition is 0.5952538251876831\n", + "solving equation:\n", + "0.3749684128948087 * du/dx0{power: 1.0} * u{power: 1.0} + -0.019771809387618973 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.008373722120223005 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0004263416530421, dim: 0.0} + -0.6534242849637866 = du/dx0{power: 1.0} * sin{power: 1.0, 
freq: 0.9991152478537982, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06959002975603559}}\n", + "Step = 1000 loss = 0.058503.\n", + "Step = 2000 loss = 0.050293.\n", + "Step = 3000 loss = 0.047031.\n", + "Step = 4000 loss = 0.044695.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 3.2654842637296064, while loss addition is 0.04469488561153412\n", + "solving equation:\n", + "-0.018660995813577833 * x_0{power: 1.0, dim: 0.0} + -0.014056749637439386 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.04907554682852285 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9992378406038669, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.07867917429058932 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023664331873711336}}\n", + "Step = 1000 loss = 0.125539.\n", + "Step = 2000 loss = 0.051566.\n", + "Step = 3000 loss = 0.037031.\n", + "Step = 4000 loss = 0.030854.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 8.863072802346169, while loss addition is 0.030854225158691406\n", "Multiobjective optimization : 5-th epoch.\n", "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + -0.0025235732315622723 * x_0{power: 2.0, dim: 0.0} + -0.2071824190425867 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.2589875572817854 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000476187209835, dim: 0.0} + 0.16463120289488709 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03474935566122701}}\n", + "Step = 1000 loss = 0.255523.\n", + "Step = 2000 loss = 0.210726.\n", + "Step = 3000 loss = 0.143193.\n", + "Step = 4000 loss = 0.128730.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 20.917891985026213, while loss addition is 0.12873047590255737\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9991938580381823, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.000550551461995, dim: 0.0} + 0.13914805910330846 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.25371374643885675}}\n", + "Step = 1000 loss = 0.055250.\n", + "Step = 2000 loss = 0.026317.\n", + "Step = 3000 loss = 0.014251.\n", + "Step = 4000 loss = 0.008811.\n", + "solution shape (200, 1)\n", + "solution[..., 
eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9999135991152718, dim: 0.0} + 2.6129840525884167 * du/dx0{power: 1.0} + -0.16340724630499204 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.999466739182568, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002625856085443, dim: 0.0} + 0.062132825937999944 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.2268914759151136 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06560021207498869}}\n", + "Step = 1000 loss = 0.060564.\n", + "Step = 2000 loss = 0.025157.\n", + "Step = 3000 loss = 0.014521.\n", + "Step = 4000 loss = 0.010023.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 2.647359423327474, while loss addition is 0.010023276321589947\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0005604626371087, dim: 0.0} + 0.0 * u{power: 1.0} + -0.10431743331005502 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9998112629439612, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.11897665134495441 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07141974168660191}}\n", + "Step = 1000 loss = 0.279747.\n", + "Step = 2000 loss = 0.099224.\n", + "Step = 3000 loss = 0.031915.\n", + "Step = 4000 loss = 0.012341.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 13.253237599963915, while loss addition is 0.012341426685452461\n", + "During MO : processing 1-th weight.\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.0008710276096189175 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.1261273863187252 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.046830318710080296}}\n", + "Step = 1000 loss = 0.282016.\n", + "Step = 2000 loss = 0.069510.\n", + "Step = 3000 loss = 0.019528.\n", + "Step = 4000 loss = 0.007496.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 13.29615768589948, while loss addition is 0.007495936006307602\n", + "solving equation:\n", + "-5.705939789508198 * x_0{power: 1.0, dim: 0.0} + 25.851138199776493 * du/dx0{power: 1.0} + -0.7712787876972751 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.000599692930118, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 7.840471539423163 * u{power: 1.0} * sin{power: 1.0, freq: 1.0000659856480183, dim: 0.0} + 27.0717987300899 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): 
{'optimizable': True, 'value': 0.031057030675157096}}\n", + "Step = 1000 loss = 0.779728.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 7.579732459327367, while loss addition is 0.8610690832138062\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0} + 20.61089300200343 * du/dx0{power: 1.0} + -34.73687154854754 * u{power: 1.0} + 1.5290990976098655 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 0.5840561498675267 * x_0{power: 2.0, dim: 0.0} + -25.102556981141 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02312392394819754}}\n", + "Step = 1000 loss = 1.411051.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 26.458637493108995, while loss addition is 1.1846338510513306\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0020914222649029077 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.015764499351099828 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.000738296114285, dim: 0.0} + 0.08859138692038288 * x_0{power: 1.0, dim: 0.0} + -0.5326080472180074 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03670302184122759}}\n", + "Step = 1000 loss = 0.110772.\n", + "Step = 2000 loss = 0.032401.\n", + "Step = 3000 loss = 0.015796.\n", + "Step = 4000 loss = 0.009431.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 11.19591944163214, while loss addition is 0.009431049227714539\n", + "During MO : processing 2-th weight.\n", + "solving equation:\n", + "-1.702653245467542 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.000738296114285, dim: 0.0} + -7.269735633154265 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9998244354589872, dim: 0.0} + 28.657825257499287 * u{power: 1.0} + -0.6234967548421917 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000877089915498, dim: 0.0} + 28.475635978893948 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03709946938919954}}\n", + "Step = 1000 loss = 1.569567.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 15.32355559252901, while loss addition is 1.947150468826294\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0006370024603044, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991152478537982, dim: 0.0} + -0.9917403188148655 * u{power: 1.0} * sin{power: 1.0, freq: 1.0009034282635207, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9994573489948375, dim: 0.0} + 0.9983849681029658 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0006846882054066, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 
'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07106685363839556}}\n", + "Step = 1000 loss = 0.000162.\n", + "Step = 2000 loss = 0.000080.\n", + "Step = 3000 loss = 0.000046.\n", + "Step = 4000 loss = 0.000030.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.8577207054788971, while loss addition is 2.9998896934557706e-05\n", + "solving equation:\n", + "1.0195118513073371 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + -0.0022347460048780243 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9997010494980495, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.6900616184261064 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02977775514028131}}\n", + "Step = 1000 loss = 0.139269.\n", + "Step = 2000 loss = 0.120720.\n", + "Step = 3000 loss = 0.116387.\n", + "Step = 4000 loss = 0.113898.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 5.138732693030972, while loss addition is 0.11389833688735962\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.0559788262785935 * u{power: 1.0} + 0.0007458526677101875 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0001462384235293, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0005558442101314, dim: 0.0} + -0.7010687080187394 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991152478537982, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.022591548412602323}}\n", + "Step = 1000 loss = 0.070504.\n", + "Step = 2000 loss = 0.046159.\n", + "Step = 3000 loss = 0.036577.\n", + "Step = 4000 loss = 0.031395.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 23.76824944384673, while loss addition is 0.03139476850628853\n", + "During MO : processing 3-th weight.\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9992983420594567, dim: 0.0} + 2.6221910025714847 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 0.07950261639313408 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.20430570948912352 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.021156805539005598}}\n", + "Step = 1000 loss = 0.182823.\n", + "Step = 2000 loss = 0.088607.\n", + "Step = 3000 loss = 0.049511.\n", + "Step = 4000 loss = 0.035781.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 15.063926167008288, while loss addition is 0.03578059747815132\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 
-0.0062353251076318755 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9992526142518535, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.000738296114285, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.6640634933983686 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991875404498936, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03605684166483257}}\n", + "Step = 1000 loss = 0.070262.\n", + "Step = 2000 loss = 0.048635.\n", + "Step = 3000 loss = 0.039327.\n", + "Step = 4000 loss = 0.033972.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 22.36071045476366, while loss addition is 0.03397199884057045\n", + "solving equation:\n", + "0.026893665159698134 * u{power: 1.0} + -0.029540037533301568 * du/dx0{power: 1.0} + -0.0012179764895727217 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.47818144043601163 * du/dx0{power: 1.0} * u{power: 1.0} + 0.5509071164529306 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9990027225528297, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03028843204056539}}\n", + "Step = 1000 loss = 0.037875.\n", + "Step = 2000 loss = 0.034463.\n", + "Step = 3000 loss = 0.033076.\n", + "Step = 4000 loss = 0.032211.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 2.7675038644911214, while loss addition is 0.032211363315582275\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + -0.06742895906092468 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 42.49688266936501 * du/dx0{power: 1.0} + 9.044839747291167 * u{power: 1.0} * sin{power: 1.0, freq: 1.0000659856480183, dim: 0.0} + -7.866586653330813 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04758414135956619}}\n", + "Step = 1000 loss = 0.818426.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 26.874698704933547, while loss addition is 0.8656592965126038\n", + "During MO : processing 4-th weight.\n", + "solving equation:\n", + "0.24784013714413816 * u{power: 1.0} * sin{power: 1.0, freq: 1.0000418095431953, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0} + -0.004786747375647993 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.2474961189991262 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.029138852518562013}}\n", + "Step = 1000 loss = 0.251457.\n", + "Step = 2000 loss = 0.077879.\n", + "Step = 3000 loss = 0.021883.\n", + "Step = 4000 loss = 0.006805.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + 
"fitness error is 13.87285203063138, while loss addition is 0.0068053919821977615\n", + "solving equation:\n", + "-0.2085909100162216 * u{power: 1.0} * cos{power: 1.0, freq: 1.000565226817999, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991152478537982, dim: 0.0} + -0.11255557446060634 * x_0{power: 2.0, dim: 0.0} + 1.4162766240646407 * x_0{power: 1.0, dim: 0.0} + -0.22179776953533323 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0008829367170493, dim: 0.0} + -3.6995226879353647 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.020813259806474554}}\n", + "Step = 1000 loss = 0.298899.\n", + "Step = 2000 loss = 0.197886.\n", + "Step = 3000 loss = 0.126785.\n", + "Step = 4000 loss = 0.081886.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 39.81599829213609, while loss addition is 0.08188556134700775\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0004090247432251, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.000738296114285, dim: 0.0} + 0.014891658927305373 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.0050128837681541714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.035196149612324236}}\n", + "Step = 1000 loss = 0.115687.\n", + "Step = 2000 loss = 0.033332.\n", + "Step = 3000 loss = 0.010100.\n", + "Step = 4000 loss = 0.004973.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 14.571401701965046, while loss addition is 0.004972618073225021\n", + "solving equation:\n", + "-0.9925552193765225 * u{power: 1.0} * sin{power: 1.0, freq: 1.0001241120243072, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.9889375462725319 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9990182162258029, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.021649734594405952}}\n", + "Step = 1000 loss = 0.000155.\n", + "Step = 2000 loss = 0.000075.\n", + "Step = 3000 loss = 0.000043.\n", + "Step = 4000 loss = 0.000028.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.8924445473609469, while loss addition is 2.7865760785061866e-05\n", + "During MO : processing 5-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9999016926339899, dim: 0.0} + 0.0 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991152478537982, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 
'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.25483115004378487}}\n", + "Step = 1000 loss = 0.164706.\n", + "Step = 2000 loss = 0.056474.\n", + "Step = 3000 loss = 0.020951.\n", + "Step = 4000 loss = 0.009988.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 21.536365374927264, while loss addition is 0.009988048113882542\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.13914805910330846 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.00092302635154, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03338168968759993}}\n", + "Step = 1000 loss = 0.055250.\n", + "Step = 2000 loss = 0.026317.\n", + "Step = 3000 loss = 0.014251.\n", + "Step = 4000 loss = 0.008811.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n", + "solving equation:\n", + "1.033627428949668 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991152478537982, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.002693036224533152 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.999466739182568, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0004263416530421, dim: 0.0} + 0.6597837604963406 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07124923331829204}}\n", + "Step = 1000 loss = 0.130982.\n", + "Step = 2000 loss = 0.112210.\n", + "Step = 3000 loss = 0.109909.\n", + "Step = 4000 loss = 0.108607.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.991884705671709, while loss addition is 0.10860662162303925\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.10431743331005502 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.11897665134495441 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.05120041859138231}}\n", + "Step = 1000 loss = 0.279747.\n", + "Step = 2000 loss = 0.099224.\n", + "Step = 3000 loss = 0.031915.\n", + "Step = 4000 loss = 0.012341.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 13.253237599963915, while loss addition is 0.012341426685452461\n", + "During MO : processing 6-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} + -0.772250141611195 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0} + 0.0 * u{power: 1.0} + 0.7691943789517184 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.00010670930108680476 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0005079563162693, dim: 0.0}\n", 
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.029509352173166414}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.08755616996015125, while loss addition is 9.740448149386793e-06\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.000565226817999, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0006759952708242, dim: 0.0} + 0.11641184810545885 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.07910364942555276 * x_0{power: 1.0, dim: 0.0} + -0.2938981772035265 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + -0.5045697164140114 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02186656361723488}}\n", + "Step = 1000 loss = 0.011038.\n", + "Step = 2000 loss = 0.001832.\n", + "Step = 3000 loss = 0.000804.\n", + "Step = 4000 loss = 0.000505.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 3.506799449938428, while loss addition is 0.0005051021580584347\n", + "solving equation:\n", + "0.1788680815621716 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0006990130607514, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.008289271537140538 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.000738296114285, dim: 0.0} + 0.10492173849829474 * x_0{power: 1.0, dim: 0.0} + -0.6476656916262802 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03565509570277156}}\n", + "Step = 1000 loss = 0.058488.\n", + "Step = 2000 loss = 0.026397.\n", + "Step = 3000 loss = 0.017362.\n", + "Step = 4000 loss = 0.013409.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 19.06795496811919, while loss addition is 0.013409473933279514\n", + "solving equation:\n", + "-0.2899060466224581 * u{power: 1.0} + 0.04272691106145045 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 0.008085368444478287 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0} + -0.013803361533480614 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.3403023601767817 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02059683040835181}}\n", + "Step = 1000 loss = 0.090391.\n", + "Step = 2000 loss = 0.032352.\n", + "Step = 3000 loss = 0.012476.\n", + "Step = 4000 loss = 0.004562.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.938553605165343, while loss addition is 0.004562029615044594\n", + "During MO : processing 7-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} + 0.9918768028246459 * u{power: 1.0} * cos{power: 1.0, freq: 1.000061081262492, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, 
dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -1.2921808603625689 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990550220269443, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2544306141375621}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07397054457093415, while loss addition is 9.02751344256103e-06\n", + "solving equation:\n", + "0.004788975805774157 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0006624739564427, dim: 0.0} + 0.15880953481871554 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002567273647396, dim: 0.0} + -0.003904922833680318 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03578092502347982}}\n", + "Step = 1000 loss = 0.033280.\n", + "Step = 2000 loss = 0.019410.\n", + "Step = 3000 loss = 0.014240.\n", + "Step = 4000 loss = 0.010787.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 23.599958875526074, while loss addition is 0.010786520317196846\n", + "solving equation:\n", + "0.9942289977787157 * u{power: 1.0} * cos{power: 1.0, freq: 0.9990081410696675, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.000901192780644, dim: 0.0} + -1.296191488718308 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002625856085443, dim: 0.0} + 4.48362924470036e-05 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06542363361916717}}\n", + "Step = 1000 loss = 0.000256.\n", + "Step = 2000 loss = 0.000145.\n", + "Step = 3000 loss = 0.000102.\n", + "Step = 4000 loss = 0.000060.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.1797453091575259, while loss addition is 6.04935412411578e-05\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9999270637303784, dim: 0.0} + -0.004627488603572427 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0013905876906450516 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.999466739182568, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9998112629439612, dim: 0.0} + 0.0 * u{power: 1.0} + 0.4831770204575569 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9995218996889677, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07086062013905482}}\n", + "Step = 1000 loss = 0.072970.\n", + "Step = 2000 loss = 0.052552.\n", + "Step = 3000 loss = 0.042114.\n", + "Step = 4000 loss = 0.035339.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.362331786301194, while loss addition is 0.03533874452114105\n", "Multiobjective optimization : 6-th epoch.\n", 
"During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007642843460587, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2565749587239569}}\n", + "Step = 1000 loss = 0.291552.\n", + "Step = 2000 loss = 0.079744.\n", + "Step = 3000 loss = 0.023927.\n", + "Step = 4000 loss = 0.010367.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 17.574989799120303, while loss addition is 0.010367344133555889\n", + "solving equation:\n", + "-0.7613180215006569 * u{power: 1.0} * cos{power: 1.0, freq: 0.9991407633654096, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + 0.7658436562747326 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0004263416530421, dim: 0.0} + 0.9941247324609566 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0002628402993003, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.08298246472868197}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.0771572431097685, while loss addition is 9.568065252096858e-06\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.8356118207626106 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002625856085443, dim: 0.0} + 0.9996249537212885 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} + -0.45451406898537006 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.999641025192669, dim: 0.0} + -0.8374921166851753 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.00044174520216, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06329152600439804}}\n", + "Step = 1000 loss = 0.000146.\n", + "Step = 2000 loss = 0.000082.\n", + "Step = 3000 loss = 0.000045.\n", + "Step = 4000 loss = 0.000030.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.05786095239900652, while loss addition is 2.959571429528296e-05\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.5211283001989595 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002567273647396, dim: 0.0} + 6.04337696746387 * du/dx0{power: 1.0} + -0.5132232718960816 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999369082593108, dim: 0.0} + 2.9664429954101874 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): 
{'optimizable': True, 'value': 0.025255961821374982}}\n", + "Step = 1000 loss = 0.519730.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 5.065794403126586, while loss addition is 0.5162031054496765\n", + "During MO : processing 1-th weight.\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.000527243553206, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0000250246039968, dim: 0.0} + 43.27455603156956 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9991721355568639, dim: 0.0} + -2.4512789549954794 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24451943981511645}}\n", + "Step = 1000 loss = 0.809193.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 26.48968476424691, while loss addition is 0.7602835297584534\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + -0.002446344287443562 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.0018872859356991734 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0003961974987254, dim: 0.0} + 0.5863059568417283 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9999336004679271, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03196189367792917}}\n", + "Step = 1000 loss = 0.085674.\n", + "Step = 2000 loss = 0.058713.\n", + "Step = 3000 loss = 0.045407.\n", + "Step = 4000 loss = 0.037364.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.71010235178851, while loss addition is 0.03736421838402748\n", + "solving equation:\n", + "7.907008006881454 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + -1.9813628461059163 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.08032456058018075}}\n", + "Step = 1000 loss = 0.756234.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 36.99669654055135, while loss addition is 0.7523428201675415\n", + "solving equation:\n", + "40.39736134657816 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.20115302583267072 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9996905558711882, dim: 0.0} + -2.8726122368278086 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.000855440202874, dim: 0.0} + 16.87378964715409 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02522522818172687}}\n", + "Step = 1000 loss = 0.772540.\n", + "solution shape 
(200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 17.214869774458716, while loss addition is 0.8133267164230347\n", + "During MO : processing 2-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.48033414724817025 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 0.9998890730748481, dim: 0.0} + 0.4967068193976616 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9996466374775308, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24432147804751703}}\n", + "Step = 1000 loss = 0.034648.\n", + "Step = 2000 loss = 0.031536.\n", + "Step = 3000 loss = 0.030091.\n", + "Step = 4000 loss = 0.029148.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 2.785081237011693, while loss addition is 0.02914782613515854\n", + "solving equation:\n", + "1.0280417639314954 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9994481064456632, dim: 0.0} + 0.0 * u{power: 1.0} + -0.013663889841563526 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0009885574813294773 * du/dx0{power: 1.0} + 0.6431746156072567 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04137200695063648}}\n", + "Step = 1000 loss = 0.136996.\n", + "Step = 2000 loss = 0.116400.\n", + "Step = 3000 loss = 0.113735.\n", + "Step = 4000 loss = 0.112071.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.883419660718625, while loss addition is 0.11207128316164017\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0001202756449972, dim: 0.0} + 0.6227932718841535 * u{power: 1.0} + -0.2562254096505429 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0004263416530421, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0000393262965317, dim: 0.0} + -0.02934433831054832 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07906874370186741}}\n", + "Step = 1000 loss = 0.125166.\n", + "Step = 2000 loss = 0.109302.\n", + "Step = 3000 loss = 0.107110.\n", + "Step = 4000 loss = 0.106766.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 6.2837355756933215, while loss addition is 0.10676632076501846\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.000565226817999, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + 0.34177442618004295 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.02562617451511231 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002567273647396, dim: 0.0} + -0.0703468148629135 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': 
{'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02460727780238678}}\n", + "Step = 1000 loss = 0.026952.\n", + "Step = 2000 loss = 0.017664.\n", + "Step = 3000 loss = 0.013141.\n", + "Step = 4000 loss = 0.010243.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.852494680027725, while loss addition is 0.010242557153105736\n", + "During MO : processing 3-th weight.\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0033828420209456923 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.000942737114076, dim: 0.0} + 0.14157287979234795 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.17388198457134152 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.022242667006570974}}\n", + "Step = 1000 loss = 0.090355.\n", + "Step = 2000 loss = 0.042917.\n", + "Step = 3000 loss = 0.021231.\n", + "Step = 4000 loss = 0.010803.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 11.03902575746506, while loss addition is 0.010803164914250374\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} + 0.9921831537397393 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.9992967203327513, dim: 0.0} + -1.2900357650392855 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02837099693207544}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07223108141941993, while loss addition is 7.925616955617443e-06\n", + "solving equation:\n", + "-0.057732857968616665 * u{power: 1.0} + 0.016662748197964816 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9992526142518535, dim: 0.0} + -0.7718983125655592 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990246085322694, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04581257240758855}}\n", + "Step = 1000 loss = 0.071971.\n", + "Step = 2000 loss = 0.045620.\n", + "Step = 3000 loss = 0.035685.\n", + "Step = 4000 loss = 0.030520.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 25.191131666592774, while loss addition is 0.030519867315888405\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + -0.9919801424879072 * u{power: 1.0} * sin{power: 1.0, freq: 1.0000659856480183, dim: 0.0} + 0.9940889264594777 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.000435081000999, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 
'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.037860193341461845}}\n", + "Step = 1000 loss = 0.000161.\n", + "Step = 2000 loss = 0.000093.\n", + "Step = 3000 loss = 0.000131.\n", + "Step = 4000 loss = 0.000029.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.8726897014726273, while loss addition is 2.904380380641669e-05\n", + "During MO : processing 4-th weight.\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.9918888471710173 * u{power: 1.0} * cos{power: 1.0, freq: 1.000565226817999, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + -1.290338084044087 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9991152478537982, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02098932767712756}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07423926311057613, while loss addition is 9.648359082348179e-06\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.07842014652256799 * x_0{power: 1.0, dim: 0.0} + 0.14389897847622068 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.008151800252020752 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + -0.5009240037503044 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02155280897255517}}\n", + "Step = 1000 loss = 0.080800.\n", + "Step = 2000 loss = 0.031706.\n", + "Step = 3000 loss = 0.016106.\n", + "Step = 4000 loss = 0.010829.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 12.743617748585232, while loss addition is 0.010829402133822441\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9994511911485991, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9999946983009786, dim: 0.0} + 42.31788399551951 * du/dx0{power: 1.0} + -0.18370280059504582 * x_0{power: 2.0, dim: 0.0} + 10.446048586593118 * u{power: 1.0} * sin{power: 1.0, freq: 1.0000659856480183, dim: 0.0} + 0.5967767468630948 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.030188318619661363}}\n", + "Step = 1000 loss = 0.836492.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 22.257841576314068, while loss addition is 0.831069827079773\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 1.029498031972464 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0} + 0.0 * u{power: 1.0} + -0.08186474302083979 * du/dx0{power: 1.0} + 0.6439987480376954 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): 
{'optimizable': True, 'value': 0.046645870472798834}}\n", + "Step = 1000 loss = 0.140973.\n", + "Step = 2000 loss = 0.120591.\n", + "Step = 3000 loss = 0.117827.\n", + "Step = 4000 loss = 0.116140.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.832183013722308, while loss addition is 0.11614016443490982\n", + "During MO : processing 5-th weight.\n", + "solving equation:\n", + "0.6227932718841535 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0008767008020476, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0002628402993003, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993306897792292, dim: 0.0} + -0.2562254096505429 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0004263416530421, dim: 0.0} + -0.02934433831054832 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.08031216803719872}}\n", + "Step = 1000 loss = 0.125166.\n", + "Step = 2000 loss = 0.109302.\n", + "Step = 3000 loss = 0.107110.\n", + "Step = 4000 loss = 0.106761.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 6.283650671293017, while loss addition is 0.10676111280918121\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0008584247318963, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 0.13914805910330846 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0008855370585164, dim: 0.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023188007212062357}}\n", + "Step = 1000 loss = 0.055250.\n", + "Step = 2000 loss = 0.026317.\n", + "Step = 3000 loss = 0.014251.\n", + "Step = 4000 loss = 0.008811.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.10431743331005502 * du/dx0{power: 1.0} * u{power: 1.0} + -0.11897665134495441 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04727470674109865}}\n", + "Step = 1000 loss = 0.279747.\n", + "Step = 2000 loss = 0.099224.\n", + "Step = 3000 loss = 0.031915.\n", + "Step = 4000 loss = 0.012341.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 13.253237599963915, while loss addition is 0.012341426685452461\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.26460614549752104 * u{power: 1.0} * sin{power: 1.0, freq: 0.9992967203327513, dim: 0.0} + -0.0024569333476697746 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * 
u{power: 1.0} + -0.13716788218648254 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.027821859218198124}}\n", + "Step = 1000 loss = 0.257435.\n", + "Step = 2000 loss = 0.082740.\n", + "Step = 3000 loss = 0.026683.\n", + "Step = 4000 loss = 0.012032.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 15.184944909736945, while loss addition is 0.01203156542032957\n", + "During MO : processing 6-th weight.\n", + "solving equation:\n", + "-0.04269306820870159 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9995925553049142, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.01363043251375209 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 0.9994411428660006, dim: 0.0} + 0.0 * u{power: 1.0} + -0.03631127620253753 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03374971502903401}}\n", + "Step = 1000 loss = 0.122008.\n", + "Step = 2000 loss = 0.046434.\n", + "Step = 3000 loss = 0.031446.\n", + "Step = 4000 loss = 0.025019.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 8.931097316612988, while loss addition is 0.02501906268298626\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.9921077291276743 * u{power: 1.0} * cos{power: 1.0, freq: 1.0009768692250987, dim: 0.0} + -1.2872077941338709 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997684727633499, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2441362328174418}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07249850748781193, while loss addition is 9.107824553211685e-06\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.002944288295322434 * x_0{power: 2.0, dim: 0.0} + -0.023674592432281996 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 0.9993359725422439, dim: 0.0} + -0.02600819172372868 * du/dx0{power: 1.0} + 0.09398676194138003 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03786495332653064}}\n", + "Step = 1000 loss = 0.145191.\n", + "Step = 2000 loss = 0.075426.\n", + "Step = 3000 loss = 0.059146.\n", + "Step = 4000 loss = 0.055832.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 8.741757187610782, while loss addition is 0.055831607431173325\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.000587296788116, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.9918526521093713 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993543112082846, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -1.2946106647266606 = 
du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04782439078316838}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07471791812512772, while loss addition is 9.621608114684932e-06\n", + "During MO : processing 7-th weight.\n", + "solving equation:\n", + "-0.13051194544246913 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.000518442942655, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0000843379695958, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.11051299133795643 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.12673978435402988 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2459203166840856}}\n", + "Step = 1000 loss = 0.557640.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 17.008129315893203, while loss addition is 0.5579334497451782\n", + "solving equation:\n", + "1.0335934290583164 * u{power: 1.0} * cos{power: 1.0, freq: 1.0004756744273549, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + -0.0031800894776073656 * x_0{power: 2.0, dim: 0.0} + -0.01577556628887154 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.5447331042705459 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.033451995858462764}}\n", + "Step = 1000 loss = 0.165169.\n", + "Step = 2000 loss = 0.145890.\n", + "Step = 3000 loss = 0.138706.\n", + "Step = 4000 loss = 0.135017.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 5.785470007671127, while loss addition is 0.1350165456533432\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0000285644142592, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.000300590895081, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -0.9925866645506131 * u{power: 1.0} * sin{power: 1.0, freq: 0.9990967616531459, dim: 0.0} + 0.9860015852877528 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9993344050031358, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.029141804917668605}}\n", + "Step = 1000 loss = 0.000157.\n", + "Step = 2000 loss = 0.000076.\n", + "Step = 3000 loss = 0.000053.\n", + "Step = 4000 loss = 0.000083.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.910055300898786, while loss addition is 8.256507862824947e-05\n", + "solving equation:\n", + "-0.06223896567645601 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9993501687014227, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.03972562775736383 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, 
freq: 1.0007476732652747, dim: 0.0} + -0.017513304113022966 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0030918870699284946 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0222846316701916}}\n",
+ "Step = 1000 loss = 0.094203.\n",
+ "Step = 2000 loss = 0.036225.\n",
+ "Step = 3000 loss = 0.014267.\n",
+ "Step = 4000 loss = 0.005040.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 8.057569390973981, while loss addition is 0.005039863754063845\n",
 "Multiobjective optimization : 7-th epoch.\n",
 "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9994794359590108, dim: 0.0} + 0.009008922694392258 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0002628402993003, dim: 0.0} + 0.26967975865967564 * u{power: 1.0} + -0.2887209576843819 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0004263416530421, dim: 0.0} + 0.012519306575802136 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.08297931632771391}}\n",
+ "Step = 1000 loss = 0.072682.\n",
+ "Step = 2000 loss = 0.067512.\n",
+ "Step = 3000 loss = 0.066966.\n",
+ "Step = 4000 loss = 0.066767.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 4.080629426639761, while loss addition is 0.0667669028043747\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + -1.3082572210613796 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002625856085443, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9992607147155012, dim: 0.0} + 0.6450758239695349 = du/dx0{power: 1.0} * u{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06889470160339439}}\n",
+ "Step = 1000 loss = 0.076418.\n",
+ "Step = 2000 loss = 0.067416.\n",
+ "Step = 3000 loss = 0.064221.\n",
+ "Step = 4000 loss = 0.062369.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 3.112596195364355, while loss addition is 0.06236889958381653\n",
+ "solving equation:\n",
+ "0.0007539907743087961 * x_0{power: 2.0, dim: 0.0} + -0.05594247816172304 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0002359956971039, dim: 0.0} + -0.7001981229969874 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.021308130624967652}}\n",
+ "Step = 1000 loss = 0.070516.\n",
+ "Step = 2000 loss = 0.046172.\n",
+ "Step = 3000 loss = 0.036584.\n",
+ "Step = 4000 loss = 0.031404.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 23.769027403986247, while loss addition is 0.03140420839190483\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} + 0.992093888032644 * u{power: 1.0} * cos{power: 1.0, freq: 1.000565226817999, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.9996388854957587, dim: 0.0} + -1.2887603229336022 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997043526450603, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.021274053099513343}}\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.07188686747439017, while loss addition is 8.599126886110753e-06\n",
+ "During MO : processing 1-th weight.\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.9918526521093713 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993543112082846, dim: 0.0} + -1.2946106647266606 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.048746648870882335}}\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.07471791812512772, while loss addition is 9.621608114684932e-06\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.05835824890691128 * u{power: 1.0} + -1.3105489934928636 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002625856085443, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.637315135890835 = du/dx0{power: 1.0} * u{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06464644248288999}}\n",
+ "Step = 1000 loss = 0.076134.\n",
+ "Step = 2000 loss = 0.066709.\n",
+ "Step = 3000 loss = 0.063267.\n",
+ "Step = 4000 loss = 0.061116.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 3.230553637514782, while loss addition is 0.06111552566289902\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0002087385058693, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.9920573700518855 * u{power: 1.0} * sin{power: 1.0, freq: 0.9994008793823961, dim: 0.0} + 0.9913974368933153 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.000435081000999, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.039807285653034735}}\n",
+ "Step = 1000 loss = 0.000164.\n",
+ "Step = 2000 loss = 0.000081.\n",
+ "Step = 3000 loss = 0.000046.\n",
+ "Step = 4000 loss = 0.000030.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.8786289302547553, while loss addition is 2.9566199373221025e-05\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 2.0, dim: 0.0} + -0.10431743331005502 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9999499949882903, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.11897665134495441 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.08087833070575669}}\n",
+ "Step = 1000 loss = 0.279747.\n",
+ "Step = 2000 loss = 0.099224.\n",
+ "Step = 3000 loss = 0.031915.\n",
+ "Step = 4000 loss = 0.012341.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 13.253237599963915, while loss addition is 0.012341426685452461\n",
+ "During MO : processing 2-th weight.\n",
+ "solving equation:\n",
+ "0.9918526521093713 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993543112082846, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9991274431398925, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0004263416530421, dim: 0.0} + -1.2946106647266606 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.049187795914149596}}\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.07471791812512772, while loss addition is 9.621608114684932e-06\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} + -0.058036004095149606 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0009986411490692, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.6634081477325183 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0003455283152076, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.08253947041746633}}\n",
+ "Step = 1000 loss = 0.070061.\n",
+ "Step = 2000 loss = 0.047233.\n",
+ "Step = 3000 loss = 0.037734.\n",
+ "Step = 4000 loss = 0.032410.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 24.09097540125339, while loss addition is 0.032409749925136566\n",
+ "solving equation:\n",
+ "6.062631430332547 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007642843460587, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.009422619057554928 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24443230130539215}}\n",
+ "Step = 1000 loss = 0.539839.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 30.936165229836654, while loss addition is 0.5325209498405457\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990599486066488, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9991341739964719, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -0.00624818737690858 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.9996388854957587, dim: 0.0} + -0.6625768612708607 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997043526450603, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03308036607985773}}\n",
+ "Step = 1000 loss = 0.070326.\n",
+ "Step = 2000 loss = 0.048683.\n",
+ "Step = 3000 loss = 0.039334.\n",
+ "Step = 4000 loss = 0.033978.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 22.415141995016125, while loss addition is 0.03397756814956665\n",
+ "During MO : processing 3-th weight.\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} + 1.2891944235730195 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002927799769696, dim: 0.0} + 0.9974470062528851 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001357656712089, dim: 0.0} + -1.2924502006190994 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023382896532139125}}\n",
+ "Step = 1000 loss = 0.000269.\n",
+ "Step = 2000 loss = 0.000139.\n",
+ "Step = 3000 loss = 0.000084.\n",
+ "Step = 4000 loss = 0.000072.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.1780360441110005, while loss addition is 7.188160816440359e-05\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9999741993243513, dim: 0.0} + 1.0310525354794673 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.00044174520216, dim: 0.0} + 0.0 * u{power: 1.0} + -0.00024911316347864254 * du/dx0{power: 1.0} + -0.013432415743211079 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.6439072415867677 = u{power: 1.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0630942000617119}}\n",
+ "Step = 1000 loss = 0.136753.\n",
+ "Step = 2000 loss = 0.116281.\n",
+ "Step = 3000 loss = 0.113618.\n",
+ "Step = 4000 loss = 0.112020.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 4.871973058398424, while loss addition is 0.11202035844326019\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0009986411490692, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9993600696758357, dim: 0.0} + 0.014891658927305373 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + -0.0050128837681541714 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07976678673997872}}\n",
+ "Step = 1000 loss = 0.115687.\n",
+ "Step = 2000 loss = 0.033332.\n",
+ "Step = 3000 loss = 0.010100.\n",
+ "Step = 4000 loss = 0.004973.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 14.571401701965046, while loss addition is 0.004972618073225021\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.107622352124688 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.000435081000999, dim: 0.0} + -0.28490034636341244 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.999409088723351, dim: 0.0} + -0.016795755651377524 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0418062026797262}}\n",
+ "Step = 1000 loss = 0.042040.\n",
+ "Step = 2000 loss = 0.037534.\n",
+ "Step = 3000 loss = 0.037181.\n",
+ "Step = 4000 loss = 0.037041.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 3.9089903778405755, while loss addition is 0.03704134747385979\n",
+ "During MO : processing 4-th weight.\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0003455283152076, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 7.907008006881454 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -1.9813628461059163 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.08103116329560509}}\n",
+ "Step = 1000 loss = 0.756272.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 37.012845141564085, while loss addition is 0.7523482441902161\n",
+ "solving equation:\n",
+ "-0.41974901237928713 * u{power: 1.0} + 0.15923186138277373 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0003895374791196, dim: 0.0} + 0.16680334721683016 * x_0{power: 1.0, dim: 0.0} + 0.017044518294450996 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.00914520890510398 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + -1.0807222674156018 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023625688849108904}}\n",
+ "Step = 1000 loss = 0.090262.\n",
+ "Step = 2000 loss = 0.040506.\n",
+ "Step = 3000 loss = 0.024970.\n",
+ "Step = 4000 loss = 0.015359.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 10.39353795414253, while loss addition is 0.015359265729784966\n",
+ "solving equation:\n",
+ "0.1299095106457692 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001350567488827, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007642843460587, dim: 0.0} + -0.020710130221777274 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24541318353456693}}\n",
+ "Step = 1000 loss = 0.177363.\n",
+ "Step = 2000 loss = 0.077524.\n",
+ "Step = 3000 loss = 0.050162.\n",
+ "Step = 4000 loss = 0.033635.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 13.19771196245512, while loss addition is 0.03363480791449547\n",
+ "solving equation:\n",
+ "0.024148942474449462 * du/dx0{power: 1.0} + 0.018965673815353006 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.000646875997239, dim: 0.0} + -0.002001696548919317 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9995392462888972, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0001826944772696, dim: 0.0} + 0.0 * u{power: 1.0} + 0.47704978463389397 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.000435081000999, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.048081149125839835}}\n",
+ "Step = 1000 loss = 0.075756.\n",
+ "Step = 2000 loss = 0.054094.\n",
+ "Step = 3000 loss = 0.043826.\n",
+ "Step = 4000 loss = 0.037229.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 14.142042033703296, while loss addition is 0.03722893074154854\n",
+ "During MO : processing 5-th weight.\n",
+ "solving equation:\n",
+ "-5.737011260187494 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9998071992574434, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9991274431398925, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0005986097491657, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9999385406974011, dim: 0.0} + 7.8474377722083455 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.7717909383364057 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023156767331512597}}\n",
+ "Step = 1000 loss = 0.732624.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 18.59074303072544, while loss addition is 0.7551468014717102\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.000565226817999, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997043526450603, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 1.791791357476002 * u{power: 1.0} * sin{power: 1.0, freq: 1.0007966304463645, dim: 0.0} + -1.6149880556112106 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.047509639701500524}}\n",
+ "Step = 1000 loss = 0.523017.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 35.57769921502772, while loss addition is 0.52306067943573\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 = u{power: 1.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2448195500862925}}\n",
+ "Step = 1000 loss = 0.122399.\n",
+ "Step = 2000 loss = 0.046284.\n",
+ "Step = 3000 loss = 0.031117.\n",
+ "Step = 4000 loss = 0.025382.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 8.849336092457227, while loss addition is 0.025381609797477722\n",
+ "solving equation:\n",
+ "0.9921270744389858 * u{power: 1.0} * cos{power: 1.0, freq: 0.9991043704586571, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9992796417314715, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.9992967203327513, dim: 0.0} + -1.2926864239838354 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04098530893873792}}\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.07453355639644833, while loss addition is 9.105770914175082e-06\n",
+ "During MO : processing 6-th weight.\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} + -0.002471361670100449 * x_0{power: 2.0, dim: 0.0} + 0.26525260085593483 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002497530275887, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.000435081000999, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.13787513522326486 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.040497566410361605}}\n",
+ "Step = 1000 loss = 0.258123.\n",
+ "Step = 2000 loss = 0.082810.\n",
+ "Step = 3000 loss = 0.026655.\n",
+ "Step = 4000 loss = 0.012003.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 15.196568118675655, while loss addition is 0.012002978473901749\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 1.2767209828917787 * u{power: 1.0} * sin{power: 1.0, freq: 1.0009986411490692, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9997406730229493, dim: 0.0} + 0.9917184118391029 * u{power: 1.0} * du/dx0{power: 1.0} + -1.2875868302492965 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0003455283152076, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07910402572761921}}\n",
+ "Step = 1000 loss = 0.000264.\n",
+ "Step = 2000 loss = 0.000196.\n",
+ "Step = 3000 loss = 0.000083.\n",
+ "Step = 4000 loss = 0.000055.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.18960781107890057, while loss addition is 5.474721183418296e-05\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007642843460587, dim: 0.0} + 0.13914805910330846 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0001539527804055, dim: 0.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2439472087953479}}\n",
+ "Step = 1000 loss = 0.055250.\n",
+ "Step = 2000 loss = 0.026317.\n",
+ "Step = 3000 loss = 0.014251.\n",
+ "Step = 4000 loss = 0.008811.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n",
+ "solving equation:\n",
+ "0.03531060675854293 * du/dx0{power: 1.0} + -0.03939073799583344 * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.02129733893496008 * x_0{power: 1.0, dim: 0.0} + 0.379198457314298 * u{power: 1.0} * du/dx0{power: 1.0} + -0.7836105452452271 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9998596707204838, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03330035532637996}}\n",
+ "Step = 1000 loss = 0.068666.\n",
+ "Step = 2000 loss = 0.059217.\n",
+ "Step = 3000 loss = 0.055439.\n",
+ "Step = 4000 loss = 0.052852.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 3.3613496864434973, while loss addition is 0.052851561456918716\n",
+ "During MO : processing 7-th weight.\n",
+ "solving equation:\n",
+ "-0.07942828948310941 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0} + -0.2021714438187452 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9996939702552495, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 3.451584807871475 * u{power: 1.0} + 3.4289604353120464 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.048284032749896975}}\n",
+ "Step = 1000 loss = 0.643892.\n",
+ "Step = 2000 loss = 0.619559.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 4.738034111740963, while loss addition is 0.618874728679657\n",
+ "solving equation:\n",
+ "-3.159072411727584 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 4.178087060749122 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + -1.2877837942425925 * u{power: 1.0} * sin{power: 1.0, freq: 0.9992967203327513, dim: 0.0} + 5.3885432286066415 = du/dx0{power: 1.0} * u{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03145833890045115}}\n",
+ "Step = 1000 loss = 0.000641.\n",
+ "Step = 2000 loss = 0.000392.\n",
+ "Step = 3000 loss = 0.000265.\n",
+ "Step = 4000 loss = 0.000189.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.6087095380638264, while loss addition is 0.00018942684982903302\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0008052526586058, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.999162776631529, dim: 0.0} + 0.10726241705601383 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + -0.2845365828370747 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9992061877201545, dim: 0.0} + 0.0 * u{power: 1.0} + -0.016680431146845098 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24582051111578643}}\n",
+ "Step = 1000 loss = 0.042125.\n",
+ "Step = 2000 loss = 0.037565.\n",
+ "Step = 3000 loss = 0.037204.\n",
+ "Step = 4000 loss = 0.037065.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 3.9213081496344904, while loss addition is 0.037064917385578156\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997043526450603, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.04504640849728478 * du/dx0{power: 1.0} * u{power: 1.0} + -0.1689337634789101 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + 0.026480274893622396 * u{power: 1.0} * sin{power: 1.0, freq: 0.9996388854957587, dim: 0.0} + -0.13450440272981815 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.030919214320554077}}\n",
+ "Step = 1000 loss = 0.189644.\n",
+ "Step = 2000 loss = 0.128938.\n",
+ "Step = 3000 loss = 0.089050.\n",
+ "Step = 4000 loss = 0.057503.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 12.026922345132785, while loss addition is 0.05750323086977005\n",
 "Multiobjective optimization : 8-th epoch.\n",
 "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 6.062631430332547 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.9992967203327513, dim: 0.0} + 0.009422619057554928 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24544344146839342}}\n",
+ "Step = 1000 loss = 0.539846.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 30.945998022393955, while loss addition is 0.5325430631637573\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.7698188987319743 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007642843460587, dim: 0.0} + 0.7720669813590333 * du/dx0{power: 1.0} * u{power: 1.0} + -3.303675688215435e-05 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9996430602007684, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.039762404575849175}}\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.09386188749669458, while loss addition is 9.756174222275149e-06\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} + -0.05796655755693672 * u{power: 1.0} + 1.3277838161617612 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9990438553953849, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.6728669579598899 = u{power: 1.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02255289549327665}}\n",
+ "Step = 1000 loss = 0.100783.\n",
+ "Step = 2000 loss = 0.091992.\n",
+ "Step = 3000 loss = 0.088569.\n",
+ "Step = 4000 loss = 0.085881.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 4.261047605894553, while loss addition is 0.08588139712810516\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993543112082846, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + -0.01895579463582304 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.040455809442600826 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9991274431398925, dim: 0.0} + 0.00387896027509771 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04947899952271343}}\n",
+ "Step = 1000 loss = 0.094367.\n",
+ "Step = 2000 loss = 0.037184.\n",
+ "Step = 3000 loss = 0.015283.\n",
+ "Step = 4000 loss = 0.005680.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 8.547333283382972, while loss addition is 0.005679897964000702\n",
+ "During MO : processing 1-th weight.\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0004555569017575, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + 0.0724588616460097 * x_0{power: 1.0, dim: 0.0} + 0.1430852013858967 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.47209461285391585 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06583552234556506}}\n",
+ "Step = 1000 loss = 0.101082.\n",
+ "Step = 2000 loss = 0.047780.\n",
+ "Step = 3000 loss = 0.023287.\n",
+ "Step = 4000 loss = 0.011436.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 16.427309375572065, while loss addition is 0.011435629799962044\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + 0.9994969338763193 * u{power: 1.0} * du/dx0{power: 1.0} + 1.2873633387066101 * u{power: 1.0} * sin{power: 1.0, freq: 1.0000659856480183, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -1.2880704087341501 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000297444436924, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.039337725132748906}}\n",
+ "Step = 1000 loss = 0.000271.\n",
+ "Step = 2000 loss = 0.000140.\n",
+ "Step = 3000 loss = 0.000084.\n",
+ "Step = 4000 loss = 0.000055.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.17194451229461988, while loss addition is 5.509952097781934e-05\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0003455283152076, dim: 0.0} + -0.0017150247282133268 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 0.47370739159584896 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9996653640069806, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07932355023539635}}\n",
+ "Step = 1000 loss = 0.075684.\n",
+ "Step = 2000 loss = 0.052925.\n",
+ "Step = 3000 loss = 0.041910.\n",
+ "Step = 4000 loss = 0.035158.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 16.72379521136892, while loss addition is 0.035158321261405945\n",
+ "solving equation:\n",
+ "-1.714066749067528 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9997070056686738, dim: 0.0} + 0.07272930530114737 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 4.938089216166267 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0006276198293396, dim: 0.0} + 0.19824083324153743 = x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023777212055044537}}\n",
+ "Step = 1000 loss = 0.547799.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 42.02363216363668, while loss addition is 0.5477710962295532\n",
+ "During MO : processing 2-th weight.\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} + 1.317049474067846 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0005939749456352, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.6669862011472093 = du/dx0{power: 1.0} * u{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07936809514522244}}\n",
+ "Step = 1000 loss = 0.096782.\n",
+ "Step = 2000 loss = 0.087296.\n",
+ "Step = 3000 loss = 0.083933.\n",
+ "Step = 4000 loss = 0.081102.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 4.205625250014204, while loss addition is 0.08110193908214569\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0001358932871025, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997043526450603, dim: 0.0} + 0.0 * u{power: 1.0} + 0.10704828636998344 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.28432057282609985 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + -0.016611775530067097 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02460185024016826}}\n",
+ "Step = 1000 loss = 0.042175.\n",
+ "Step = 2000 loss = 0.037582.\n",
+ "Step = 3000 loss = 0.037219.\n",
+ "Step = 4000 loss = 0.037079.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 3.9286484379420763, while loss addition is 0.03707899525761604\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993543112082846, dim: 0.0} + 0.16293263735448654 * x_0{power: 1.0, dim: 0.0} + 0.01647475219227143 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + -0.3175894800902756 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0004263416530421, dim: 0.0} + -0.9730719972216175 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0600293958688262}}\n",
+ "Step = 1000 loss = 0.058182.\n",
+ "Step = 2000 loss = 0.027401.\n",
+ "Step = 3000 loss = 0.021534.\n",
+ "Step = 4000 loss = 0.019822.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 2.6333524377360735, while loss addition is 0.019821591675281525\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.13914805910330846 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2449724556845033}}\n",
+ "Step = 1000 loss = 0.055250.\n",
+ "Step = 2000 loss = 0.026317.\n",
+ "Step = 3000 loss = 0.014251.\n",
+ "Step = 4000 loss = 0.008811.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n",
+ "During MO : processing 3-th weight.\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} + -5.74260025335318 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9992167039288732, dim: 0.0} + 7.847526036695081 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0002625856085443, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.7609616124989689 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.022424506758298376}}\n",
+ "Step = 1000 loss = 0.732860.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 18.598927489242122, while loss addition is 0.7555979490280151\n",
+ "solving equation:\n",
+ "-0.3960842980768151 * u{power: 1.0} + 0.15688087676487364 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0005381203869137, dim: 0.0} + 0.044412079232266755 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + -0.012786359523638269 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + -0.9449618165148329 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0624990307786957}}\n",
+ "Step = 1000 loss = 0.076928.\n",
+ "Step = 2000 loss = 0.028554.\n",
+ "Step = 3000 loss = 0.011147.\n",
+ "Step = 4000 loss = 0.004203.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 4.4425181755082725, while loss addition is 0.004202608950436115\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0000584031944244, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 0.13914805910330846 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.999939450785825, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.019572064790843714 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07913431624335093}}\n",
+ "Step = 1000 loss = 0.055250.\n",
+ "Step = 2000 loss = 0.026317.\n",
+ "Step = 3000 loss = 0.014251.\n",
+ "Step = 4000 loss = 0.008811.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 16.88622918898008, while loss addition is 0.00881137140095234\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997043526450603, dim: 0.0} + 0.017308305252491225 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + -0.06946909495388143 * u{power: 1.0} + 0.1738376435673663 * x_0{power: 1.0, dim: 0.0} + -0.3155122309968879 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + -1.0397999637315816 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.024308195098712952}}\n",
+ "Step = 1000 loss = 0.067801.\n",
+ "Step = 2000 loss = 0.031770.\n",
+ "Step = 3000 loss = 0.025401.\n",
+ "Step = 4000 loss = 0.023651.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 2.8970246843103267, while loss addition is 0.023651188239455223\n",
+ "During MO : processing 4-th weight.\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.2491209779531707 * u{power: 1.0} * sin{power: 1.0, freq: 1.0009986411490692, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.24439729146676004 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06462971177652439}}\n",
+ "Step = 1000 loss = 0.243136.\n",
+ "Step = 2000 loss = 0.075268.\n",
+ "Step = 3000 loss = 0.021464.\n",
+ "Step = 4000 loss = 0.006883.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 14.26533082492448, while loss addition is 0.006882966961711645\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.0816992929298097 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 1.032560861699034 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.00044174520216, dim: 0.0} + 0.644765811390289 = du/dx0{power: 1.0} * u{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0814048901752649}}\n",
+ "Step = 1000 loss = 0.140662.\n",
+ "Step = 2000 loss = 0.120374.\n",
+ "Step = 3000 loss = 0.117627.\n",
+ "Step = 4000 loss = 0.115948.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 4.821384864898555, while loss addition is 0.11594773828983307\n",
+ "solving equation:\n",
+ "-0.006205394955195675 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0006622129401601, dim: 0.0} + -1.3151533002298668 * u{power: 1.0} * sin{power: 1.0, freq: 0.9994388219315796, dim: 0.0} + 0.6379736494340873 = u{power: 1.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.049717188875272544}}\n",
+ "Step = 1000 loss = 0.075965.\n",
+ "Step = 2000 loss = 0.066989.\n",
+ "Step = 3000 loss = 0.063652.\n",
+ "Step = 4000 loss = 0.061627.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 3.1370094003193807, while loss addition is 0.06162676587700844\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9991274431398925, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -0.05807839048749517 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0004263416530421, dim: 0.0} + -0.6672762963289868 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.041004312167459034}}\n",
+ "Step = 1000 loss = 0.069903.\n",
+ "Step = 2000 loss = 0.047088.\n",
+ "Step = 3000 loss = 0.037567.\n",
+ "Step = 4000 loss = 0.032250.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 24.041947810386294, while loss addition is 0.032250165939331055\n",
+ "During MO : processing 5-th weight.\n",
+ "solving equation:\n",
+ "-0.038274223409630596 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.3873028448276306 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 2.923441716081384 * du/dx0{power: 1.0} + -0.14279236947975618 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9998478598402849, dim: 0.0} + 1.7438762169148083 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06185807025156036}}\n",
+ "Step = 1000 loss = 0.015542.\n",
+ "Step = 2000 loss = 0.004878.\n",
+ "Step = 3000 loss = 0.003219.\n",
+ "Step = 4000 loss = 0.002164.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 1.8802395111666268, while loss addition is 0.0021643906366080046\n",
+ "solving equation:\n",
+ "-0.0032507824913972725 * x_0{power: 2.0, dim: 0.0} + -0.20844048700944806 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + -0.008994416927727844 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.24980864622220125 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + -0.13993579467312092 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03035430320925492}}\n",
+ "Step = 1000 loss = 0.321497.\n",
+ "Step = 2000 loss = 0.117086.\n",
+ "Step = 3000 loss = 0.059823.\n",
+ "Step = 4000 loss = 0.043973.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 13.632718996864279, while loss addition is 0.04397294670343399\n",
+ "solving equation:\n",
+ "2.8152682926628994 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.20579875002222317 * x_0{power: 1.0, dim: 0.0} + -0.15478738295184813 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0007633971619982, dim: 0.0} + 0.05646384442826264 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + 1.466572006753893 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.05119115020328511}}\n",
+ "Step = 1000 loss = 0.020332.\n",
+ "Step = 2000 loss = 0.009289.\n",
+ "Step = 3000 loss = 0.005826.\n",
+ "Step = 4000 loss = 0.004098.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 1.7918548613714858, while loss addition is 0.004097720608115196\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0009067151008533, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0004047256575637, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24526509399229005}}\n",
+ "Step = 1000 loss = 0.162288.\n",
+ "Step = 2000 loss = 0.055793.\n",
+ "Step = 3000 loss = 0.020853.\n",
+ "Step = 4000 loss = 0.010106.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 21.363113259911746, while loss addition is 0.0101064033806324\n",
+ "During MO : processing 6-th weight.\n",
+ "solving equation:\n",
+ "1.301642601275773 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.999094390838924, dim: 0.0} + 0.9929607982329164 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9996687713683586, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9991274431398925, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.00027353988959677444 = u{power: 1.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06360187248491235}}\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.08874817376913703, while loss addition is 9.612417670723516e-06\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.9919514240039421 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993543112082846, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -1.293748509820688 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9993374790208489, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04942208085733874}}\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.07353857800566864, while loss addition is 9.299337762058713e-06\n",
+ "solving equation:\n",
+ "0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9995432805955173, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.00171784126954171 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.4767604473990076 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0004331900598753, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.019138906842586515}}\n",
+ "Step = 1000 loss = 0.075798.\n",
+ "Step = 2000 loss = 0.053070.\n",
+ "Step = 3000 loss = 0.042064.\n",
+ "Step = 4000 loss = 0.035338.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 16.660198528473327, while loss addition is 0.03533783555030823\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997043526450603, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.9918046101972277 * u{power: 1.0} * sin{power: 1.0, freq: 0.9996388854957587, dim: 0.0} + 0.9941834938048565 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0009200951387052, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02175880488165256}}\n",
+ "Step = 1000 loss = 0.000167.\n",
+ "Step = 2000 loss = 0.000084.\n",
+ "Step = 3000 loss = 0.000060.\n",
+ "Step = 4000 loss = 0.000032.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.8587041600843162, while loss addition is 3.202289008186199e-05\n",
+ "During MO : processing 7-th weight.\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.9920763717107901 * u{power: 1.0} * sin{power: 1.0, freq: 1.0009986411490692, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.9954183968554098 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9998029960196677, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07932591519808187}}\n",
+ "Step = 1000 loss = 0.000159.\n",
+ "Step = 2000 loss = 0.000078.\n",
+ "Step = 3000 loss = 0.000045.\n",
+ "Step = 4000 loss = 0.000029.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.8814922799529641, while loss addition is 2.884480454667937e-05\n",
+ "solving equation:\n",
+ "0.016403781154306567 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + -0.011229825355274146 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9998054413825541, dim: 0.0} + 0.0037038022719284554 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0007569529360025, dim: 0.0} + 0.4795848068216014 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9998311373156964, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03234207429381151}}\n",
+ "Step = 1000 loss = 0.072496.\n",
+ "Step = 2000 loss = 0.046365.\n",
+ "Step = 3000 loss = 0.033735.\n",
+ "Step = 4000 loss = 0.026828.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 18.892909876978727, while loss addition is 0.026827501133084297\n",
+ "solving equation:\n",
+ "0.0724588616460097 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.1430852013858967 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.47209461285391585 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07365190595758259}}\n",
+ "Step = 1000 loss = 0.101082.\n",
+ "Step = 2000 loss = 0.047780.\n",
+ "Step = 3000 loss = 0.023287.\n",
+ "Step = 4000 loss = 0.011436.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 16.427309375572065, while loss addition is 0.011435629799962044\n",
+ "solving equation:\n",
+ "0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9993970849744858, dim: 0.0} + 0.989906137110987 * u{power: 1.0} * cos{power: 1.0, freq: 1.0009641951459598, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.000102644985817, dim: 0.0} + 1.2935749520124364 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9996838849892749, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -1.28040136629824 = du/dx0{power: 1.0} * u{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24705756064074486}}\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.0638875199271428, while loss addition is 9.702535862743389e-06\n",
 "Multiobjective optimization : 9-th epoch.\n",
 "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 10-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 11-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 12-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 13-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 14-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 15-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 16-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 17-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 18-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 19-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 20-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n",
- "During MO : processing 3-th weight.\n",
- "During MO : processing 4-th weight.\n",
- "During MO : processing 5-th weight.\n",
- "During MO : processing 6-th weight.\n",
- "During MO : processing 7-th weight.\n",
- "Multiobjective optimization : 21-th epoch.\n",
- "During MO : processing 0-th weight.\n",
- "During MO : processing 1-th weight.\n",
- "During MO : processing 2-th weight.\n"
+ "solving equation:\n",
+ "-1.3004502090963435 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0} + 2.6826020201385927 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9995706094806313, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0003309551443484, dim: 0.0} + 0.0 * u{power: 1.0} + 0.011369696603551577 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04039998871569154}}\n",
+ "Step = 1000 loss = 1.056034.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 0.1384434611234065, while loss addition is 1.0495656728744507\n",
+ "solving equation:\n",
+ "-0.24069833672103522 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.04087882710696346 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 0.13214081265945216 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0004748491072657, dim: 0.0} + 0.6346099022858119 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0001635988750188, dim: 0.0} + 0.15615274692861922 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02420633333546427}}\n",
+ "Step = 1000 loss = 0.216649.\n",
+ "Step = 2000 loss = 0.150425.\n",
+ "Step = 3000 loss = 0.098445.\n",
+ "Step = 4000 loss = 0.057193.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 14.28617575770546, while loss addition is 0.0571933351457119\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990070456058737, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9995250773269212, dim: 0.0} + 0.0 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007642843460587, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24581893040515507}}\n",
+ "Step = 1000 loss = 0.161607.\n",
+ "Step = 2000 loss = 0.055601.\n",
+ "Step = 3000 loss = 0.020829.\n",
+ "Step = 4000 loss = 0.010099.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 21.254482290066992, while loss addition is 0.01009858027100563\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997043526450603, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0001808555460427, dim: 0.0} + -0.004109438138858109 * x_0{power: 2.0, dim: 0.0} + -0.17042939754528402 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + 0.11074116635231843 * u{power: 1.0} * sin{power: 1.0, freq: 0.9996388854957587, dim: 0.0} + 0.013235608777592556 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0340389074093836}}\n",
+ "Step = 1000 loss = 0.147575.\n",
+ "Step = 2000 loss = 0.083161.\n",
+ "Step = 3000 loss = 0.057465.\n",
+ "Step = 4000 loss = 0.038479.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 18.294837928369198, while loss addition is 0.038478534668684006\n",
+ "During MO : processing 1-th weight.\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} + 0.7370656067300055 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9998664267060112, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 10.71619693569602 * u{power: 1.0} + -11.965174326627672 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + -0.1988525360814395 = x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0238323541989093}}\n",
+ "Step = 1000 loss = 1.053201.\n",
+ "Step = 2000 loss = 1.049055.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 6.86178846143434, while loss addition is 1.0506373643875122\n",
+ "solving equation:\n",
+ "-0.6226104052974143 * u{power: 1.0} + 0.20031679107179898 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 0.999963147657132, dim: 0.0} + 0.0064066560719441 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0006652057884857, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.9996388854957587, dim: 0.0} + -0.01581592029609585 = du/dx0{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.045092074079162846}}\n",
+ "Step = 1000 loss = 0.118498.\n",
+ "Step = 2000 loss = 0.079955.\n",
+ "Step = 3000 loss = 0.053182.\n",
+ "Step = 4000 loss = 0.032765.\n",
+ "solution shape (200, 1)\n",
+ "solution[..., eq_idx] (200,), eq_idx 0\n",
+ "fitness error is 5.5094733164218, while loss addition is 0.032764580100774765\n",
+ "solving equation:\n",
+ "0.04005180752675567 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + -0.017778206161633406 * x_0{power: 2.0, dim: 0.0} * u{power: 1.0} + -0.06079763347640613 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9997122891758284, dim: 0.0} +
0.0034485143161285606 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02226986811197696}}\n", + "Step = 1000 loss = 0.093551.\n", + "Step = 2000 loss = 0.035680.\n", + "Step = 3000 loss = 0.013936.\n", + "Step = 4000 loss = 0.004899.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 8.06227913811951, while loss addition is 0.004898805171251297\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.7671539048955188 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0} + 0.7656401543809437 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 7.072333739643888e-06 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0002272944064605, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.039996988977074446}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.08611624868165434, while loss addition is 9.332351510238368e-06\n", + "During MO : processing 2-th weight.\n", + "solving equation:\n", + "4.021686487646299 * du/dx0{power: 1.0} + 1.7165691155661487 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997043526450603, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.000565226817999, dim: 0.0} + -0.0943566005874501 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0003667883354161, dim: 0.0} + -0.001807078705843677 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02314100708967871}}\n", + "Step = 1000 loss = 0.366235.\n", + "Step = 2000 loss = 0.049817.\n", + "Step = 3000 loss = 0.006415.\n", + "Step = 4000 loss = 0.002706.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 11.120883692578337, while loss addition is 0.002706328174099326\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.06576318298547892 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0003455283152076, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.015341523080443509 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.4148393537527632 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.08052940002732017}}\n", + "Step = 1000 loss = 0.155620.\n", + "Step = 2000 loss = 0.047051.\n", + "Step = 3000 loss = 0.015374.\n", + "Step = 4000 loss = 0.007549.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 14.967731207227068, while loss addition is 0.00754914153367281\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 = du/dx0{power: 1.0} * u{power: 
1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24512800462866835}}\n", + "Step = 1000 loss = 0.122399.\n", + "Step = 2000 loss = 0.046284.\n", + "Step = 3000 loss = 0.031117.\n", + "Step = 4000 loss = 0.025382.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 8.849336092457227, while loss addition is 0.025381609797477722\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9993374790208489, dim: 0.0} + -4.41848174169014 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 21.764264528336742 * u{power: 1.0} * sin{power: 1.0, freq: 0.9991412060832743, dim: 0.0} + 9.274162369852151 = du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.061679142509413046}}\n", + "Step = 1000 loss = 0.749521.\n", + "Step = 2000 loss = 0.759087.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 10.395418241006395, while loss addition is 0.7560441493988037\n", + "During MO : processing 3-th weight.\n", + "solving equation:\n", + "0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9997496737948767, dim: 0.0} + 0.0 * u{power: 1.0} + 0.014891658927305373 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 0.9996388854957587, dim: 0.0} + -0.0050128837681541714 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03890872310984797}}\n", + "Step = 1000 loss = 0.115687.\n", + "Step = 2000 loss = 0.033332.\n", + "Step = 3000 loss = 0.010100.\n", + "Step = 4000 loss = 0.004973.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 14.571401701965046, while loss addition is 0.004972618073225021\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + -0.056058704756324615 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 0.9990852415513995, dim: 0.0} + 0.0008172527759745772 * x_0{power: 2.0, dim: 0.0} + -0.03100964326376943 * u{power: 1.0} * sin{power: 1.0, freq: 0.9991901459148603, dim: 0.0} + -0.6870089833043439 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9997043526450603, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.022256609270367315}}\n", + "Step = 1000 loss = 0.070557.\n", + "Step = 2000 loss = 0.047360.\n", + "Step = 3000 loss = 0.038209.\n", + "Step = 4000 loss = 0.033114.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 19.38647867628163, while loss addition is 0.03311393782496452\n", + "solving equation:\n", + "-28.601345194521304 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993543112082846, dim: 0.0} + 29.85759810446051 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9993374790208489, dim: 0.0} + 0.0 * u{power: 1.0} + 
-0.08191105826358791 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 37.9457422190769 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.046816734918584235}}\n", + "Step = 1000 loss = 0.009575.\n", + "Step = 2000 loss = 0.007579.\n", + "Step = 3000 loss = 0.006772.\n", + "Step = 4000 loss = 0.005814.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.2954929138605888, while loss addition is 0.005814261268824339\n", + "solving equation:\n", + "-0.005100746442368773 * du/dx0{power: 1.0} + -0.011132053718072835 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 1.0289810114701003 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0006644228124928, dim: 0.0} + 0.0 * u{power: 1.0} + -0.0006077167246481933 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 1.0001136842959095, dim: 0.0} + 0.6380540063311015 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023556236252937502}}\n", + "Step = 1000 loss = 0.137770.\n", + "Step = 2000 loss = 0.117226.\n", + "Step = 3000 loss = 0.114564.\n", + "Step = 4000 loss = 0.113015.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.922037017168345, while loss addition is 0.11301453411579132\n", + "During MO : processing 4-th weight.\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + -0.05772115138597717 * u{power: 1.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * du/dx0{power: 1.0} + 0.0167552368844561 * x_0{power: 1.0, dim: 0.0} + -0.7715723906370626 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9993374790208489, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.023296166102013616}}\n", + "Step = 1000 loss = 0.071963.\n", + "Step = 2000 loss = 0.045624.\n", + "Step = 3000 loss = 0.035674.\n", + "Step = 4000 loss = 0.030513.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 25.196577831015773, while loss addition is 0.030513446778059006\n", + "solving equation:\n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.9922444543612746 * u{power: 1.0} * sin{power: 1.0, freq: 0.9996388854957587, dim: 0.0} + 0.9905899072073858 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9999649587849929, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04826886397357628}}\n", + "Step = 1000 loss = 0.000159.\n", + "Step = 2000 loss = 0.000089.\n", + "Step = 3000 loss = 0.000044.\n", + "Step = 4000 loss = 0.000028.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.8903219292841019, while loss addition is 2.8482538255047984e-05\n", + "solving equation:\n", + "0.0 * u{power: 
1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.0041630316338083 * x_0{power: 2.0, dim: 0.0} * cos{power: 1.0, freq: 0.9991274431398925, dim: 0.0} + 1.0103134603677864 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9990175959700729, dim: 0.0} + -0.005050151184488927 * x_0{power: 2.0, dim: 0.0} + 0.8434382960647181 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04156341969426353}}\n", + "Step = 1000 loss = 0.177728.\n", + "Step = 2000 loss = 0.140188.\n", + "Step = 3000 loss = 0.132932.\n", + "Step = 4000 loss = 0.129741.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 6.321576713568408, while loss addition is 0.12974059581756592\n", + "solving equation:\n", + "1.9291199720028347 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0004772636281185, dim: 0.0} + -0.10689916791967845 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 0.9998270702837013, dim: 0.0} + 3.761669406586614 * du/dx0{power: 1.0} + -0.6770351879366114 * x_0{power: 1.0, dim: 0.0} + 4.254903596250068 = du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0647403272974263}}\n", + "Step = 1000 loss = 0.258079.\n", + "Step = 2000 loss = 0.041136.\n", + "Step = 3000 loss = 0.006450.\n", + "Step = 4000 loss = 0.002932.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.356743349911138, while loss addition is 0.002931589027866721\n", + "During MO : processing 5-th weight.\n", + "solving equation:\n", + "1.322561200769392 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.9995962447733019, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.0009986411490692, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 2.0, dim: 0.0} + -0.6644066021357896 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.08979153375381366}}\n", + "Step = 1000 loss = 0.095560.\n", + "Step = 2000 loss = 0.086293.\n", + "Step = 3000 loss = 0.082940.\n", + "Step = 4000 loss = 0.080081.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.189596396557459, while loss addition is 0.0800807923078537\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007642843460587, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24683468927311508}}\n", + "Step = 1000 loss = 0.161607.\n", + "Step = 2000 loss = 0.055601.\n", + "Step = 3000 loss = 0.020829.\n", + "Step = 4000 loss = 0.010099.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 
21.254482290066992, while loss addition is 0.01009858027100563\n", + "solving equation:\n", + "1.0351164008244305 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007910977049115, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.0003112408834505, dim: 0.0} + -0.08155960648371852 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 0.9997438287252872, dim: 0.0} + 0.6453926103229294 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06356975958477099}}\n", + "Step = 1000 loss = 0.140397.\n", + "Step = 2000 loss = 0.120205.\n", + "Step = 3000 loss = 0.117454.\n", + "Step = 4000 loss = 0.115753.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 4.8120901850388424, while loss addition is 0.11575332283973694\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0008304339466179, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.9921831537397393 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + -1.2900357650392855 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02947616570466715}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.07223108141941993, while loss addition is 7.925616955617443e-06\n", + "During MO : processing 6-th weight.\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993543112082846, dim: 0.0} + 0.2491209779531707 * u{power: 1.0} * sin{power: 1.0, freq: 1.0009986411490692, dim: 0.0} + -0.24439729146676004 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0803149926566774}}\n", + "Step = 1000 loss = 0.243136.\n", + "Step = 2000 loss = 0.075268.\n", + "Step = 3000 loss = 0.021464.\n", + "Step = 4000 loss = 0.006883.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 14.26533082492448, while loss addition is 0.006882966961711645\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0008121751645507, dim: 0.0} + 0.016755236884456085 * x_0{power: 1.0, dim: 0.0} + -0.05772115138597711 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.7715723906370628 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 0.9993374790208489, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.051718778692616804}}\n", + "Step = 1000 loss = 0.071963.\n", + "Step = 2000 loss = 0.045624.\n", + "Step = 3000 loss = 0.035674.\n", + "Step = 4000 loss = 0.030513.\n", + 
"solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 25.196577831015773, while loss addition is 0.030513446778059006\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + -0.16856506500826723 * x_0{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.0002383617303825, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} * sin{power: 1.0, freq: 1.0001241582867575, dim: 0.0} + -0.12167896475486614 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24713115021783216}}\n", + "Step = 1000 loss = 0.188975.\n", + "Step = 2000 loss = 0.134243.\n", + "Step = 3000 loss = 0.093830.\n", + "Step = 4000 loss = 0.061244.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 10.40582517522412, while loss addition is 0.06124395132064819\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 0.999490286864639, dim: 0.0} + 0.015341523080443523 * du/dx0{power: 1.0} * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 1.0} + 0.06576318298547892 * x_0{power: 1.0, dim: 0.0} + -0.4148393537527631 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.07395583624019623}}\n", + "Step = 1000 loss = 0.155620.\n", + "Step = 2000 loss = 0.047051.\n", + "Step = 3000 loss = 0.015374.\n", + "Step = 4000 loss = 0.007549.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 14.967731207227068, while loss addition is 0.00754914153367281\n", + "During MO : processing 7-th weight.\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.2671136187045576 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.000424429702002, dim: 0.0} + -0.0024621647162917817 * x_0{power: 2.0, dim: 0.0} + 0.12756825513426548 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.04098359295625319}}\n", + "Step = 1000 loss = 0.293411.\n", + "Step = 2000 loss = 0.071337.\n", + "Step = 3000 loss = 0.026259.\n", + "Step = 4000 loss = 0.014484.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 21.879071853524085, while loss addition is 0.014484263025224209\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.3789517477990886 * du/dx0{power: 1.0} * u{power: 1.0} + -0.645250055694077 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007642843460587, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24451471840679423}}\n", + "Step = 1000 loss = 0.056629.\n", + "Step = 2000 loss = 
0.049018.\n", + "Step = 3000 loss = 0.046059.\n", + "Step = 4000 loss = 0.044072.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 3.3157303255591524, while loss addition is 0.04407238960266113\n", + "solving equation:\n", + "0.7664811159478224 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.765960913160351 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993543112082846, dim: 0.0} + 0.9992418342814514 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0009303579078952, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06419175002688814}}\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 0.08230094156972319, while loss addition is 9.826424502534792e-06\n", + "solving equation:\n", + "0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.25117164679451837 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.000782852739645, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + 0.006576752247816488 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0781805891806318}}\n", + "Step = 1000 loss = 0.273726.\n", + "Step = 2000 loss = 0.065070.\n", + "Step = 3000 loss = 0.021249.\n", + "Step = 4000 loss = 0.009821.\n", + "solution shape (200, 1)\n", + "solution[..., eq_idx] (200,), eq_idx 0\n", + "fitness error is 18.004171450838395, while loss addition is 0.009820576757192612\n", + "The optimization has been conducted.\n" ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 22-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 23-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 24-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 25-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th 
weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 26-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 27-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 28-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 29-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 30-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 31-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 32-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 33-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 34-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th 
weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 35-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 36-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 37-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 38-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 39-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 40-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 41-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 42-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 43-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th 
weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 44-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 45-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 46-th epoch.\n", - "During MO : processing 0-th weight.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 47-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 48-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 49-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "The optimization has been conducted.\n" - ] - } - ], - "source": [ - "factors_max_number = {'factors_num' : [1, 2], 'probas' : [0.65, 0.35]} # 1 factor with P = 0.65, 2 with P = 0.35\n", - "\n", - "epde_search_obj.fit(data=[x,], variable_names=['u',], max_deriv_order=(1,), derivs=[x_dot.reshape((-1, 1)),],\n", - " equation_terms_max_number=4, data_fun_pow = 1,\n", - " additional_tokens=[trig_tokens, grid_tokens],\n", - " equation_factors_max_number=factors_max_number,\n", - " eq_sparsity_interval=(1e-6, 1e-2))" - ] - }, - { - "cell_type": "markdown", - "id": "ed7998ee", - "metadata": {}, - "source": [ - "The discovered equations can be accessed with ``EpdeSearch.equations()`` method. If the ``only_print = True``, than the equations will be printed in their text forms. 
\n", - "\n", - "Otherwise, they will be return: if the followup argument ``only_str`` is ``True``, than the equations are returned only in their symbolic string-form. If ``only_str = False``, than the equation in its program implementation ``epde.structure.main_structures.SoEq`` are returned." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "4e30fe8f", - "metadata": {}, - "outputs": [ + } + ], + "source": [ + "factors_max_number = {'factors_num' : [1, 2], 'probas' : [0.5, 0.5]} # 1 factor with P = 0.65, 2 with P = 0.35\n", + "\n", + "epde_search_obj.fit(data=[x,], variable_names=['u',], max_deriv_order=(1,), #derivs=[x_dot.reshape((-1, 1)),],\n", + " equation_terms_max_number=6, data_fun_pow = 1,\n", + " additional_tokens=[trig_tokens, grid_tokens],\n", + " equation_factors_max_number=factors_max_number,\n", + " eq_sparsity_interval=(1e-2, 5*1e-1))" + ] + }, + { + "cell_type": "markdown", + "id": "ed7998ee", + "metadata": {}, + "source": [ + "The discovered equations can be accessed with ``EpdeSearch.equations()`` method. If the ``only_print = True``, than the equations will be printed in their text forms. \n", + "\n", + "Otherwise, they will be return: if the followup argument ``only_str`` is ``True``, than the equations are returned only in their symbolic string-form. If ``only_str = False``, than the equation in its program implementation ``epde.structure.main_structures.SoEq`` are returned." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "90405acb", + "metadata": {}, + "outputs": [ { "name": "stdout", "output_type": "stream", @@ -735,21 +3340,33 @@ "0-th non-dominated level\n", "\n", "\n", - "0.0 * x_0{power: 1.0, dim: 0.0} + 0.025217005432275823 * u{power: 1.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.6669677405245148 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0499000873630582, dim: 0.0}\n", - "{'terms_number': {'optimizable': False, 'value': 4}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.005657086033085299}} , with objective function values of [5.89152992 2. ] \n", + "0.0 * x_0{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.0007642843460587, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.2565749587239569}} , with objective function values of [606.24215567 1. ] \n", "\n", - "0.00888228979551941 * x_0{power: 1.0, dim: 0.0} + 0.006798153402721441 * u{power: 1.0} * cos{power: 1.0, freq: 0.9974444910634812, dim: 0.0} + -1.0004120960660567 * u{power: 1.0} * sin{power: 1.0, freq: 1.0076995322921416, dim: 0.0} + 0.9973172059164307 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.006623183604285, dim: 0.0}\n", - "{'terms_number': {'optimizable': False, 'value': 4}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.005319516674076823}} , with objective function values of [0.0147906 4. 
] \n", + "0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.2491209779531707 * u{power: 1.0} * sin{power: 1.0, freq: 1.0009986411490692, dim: 0.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} + -0.24439729146676004 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.06462971177652439}} , with objective function values of [83.09500044 2. ] \n", "\n", - "0.0 * u{power: 1.0} + -0.999037871228366 * u{power: 1.0} * sin{power: 1.0, freq: 1.000001467098697, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 1.0076499665982352 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.0020254178539623, dim: 0.0}\n", - "{'terms_number': {'optimizable': False, 'value': 4}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.001500514159705181}} , with objective function values of [0.08473055 2.5 ] \n", + "0.0 * u{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.0 * x_0{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.0008304339466179, dim: 0.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * x_0{power: 1.0, dim: 0.0} + 0.9921831537397393 * u{power: 1.0} * cos{power: 1.0, freq: 0.9999180518018077, dim: 0.0} + -1.2900357650392855 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.000024490072661, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02947616570466715}} , with objective function values of [0.15148725 2.5 ] \n", + "\n", + "0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * x_0{power: 2.0, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 0.9993543112082846, dim: 0.0} + 0.2491209779531707 * u{power: 1.0} * sin{power: 1.0, freq: 1.0009986411490692, dim: 0.0} + -0.24439729146676004 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.5, 0.5]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0803149926566774}} , with objective function values of [83.09500044 2. ] \n", "\n" ] } ], "source": [ - "epde_search_obj.equations(only_print = True)\n", - "res = epde_search_obj.equations(False, only_str = False)" + "epde_search_obj.equations()" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "a39874c4", + "metadata": {}, + "outputs": [], + "source": [ + "res = epde_search_obj.equations(False)" ] }, { @@ -765,12 +3382,12 @@ "id": "045ca958", "metadata": {}, "source": [ - "We can use visual analysis of the Pareto-optimal set of candidate equations, placed in the complexity-quality criteria space, to select the optimal equation. Method ``EpdeSearch.visualize_solutions()`` illustrates the first non-dominated set of solutions (i.e. Pareto frontier). With its help we can see, that equations like $x' \\cdot \\sin{(1.001 \\; t)} = 9.997\\cdot 10^{-1} x \\cdot cos{(1.006 \\; t)} + -1.274$ (due to stochastic nature of evolutionary optimization) matches the knee point of a Pareto frontier curve." + "We can use visual analysis of the Pareto-optimal set of candidate equations, placed in the complexity-quality criteria space, to select the optimal equation. 
Method ``EpdeSearch.visualize_solutions()`` illustrates the first non-dominated set of solutions (i.e. the Pareto frontier). With its help we can see that an equation like $x' \\cdot \\sin{(1.001 \\; t)} = 9.997\\cdot 10^{-1} x \\cdot \\cos{(1.006 \\; t)} - 1.274$ (the exact form may differ due to the stochastic nature of the evolutionary optimization) matches the knee point of the Pareto frontier curve. *Note:* in some cases only two equations are discovered, or several equally simple equations are proposed; the knee-point analysis is then not applicable. Rendering of these equations relies on matplotlib's LaTeX support, so the call below fails with a ``RuntimeError`` when no LaTeX installation is available." ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 9, "id": "010064f1", "metadata": { "scrolled": false }, @@ -780,14 +3397,54 @@ "name": "stdout", "output_type": "stream", "text": [ - "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} \\cdot cos^{1.0}(1.05 x_{0.0}) = 2.522\\cdot 10^{-2} u + 6.67\\cdot 10^{-1} \\end{eqnarray*}$\n", - "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} \\cdot cos^{1.0}(1.007 x_{0.0}) = 8.882\\cdot 10^{-3} x_0 + 6.798\\cdot 10^{-3} u \\cdot cos^{1.0}(9.974\\cdot 10^{-1} x_{0.0}) + -1.0u \\cdot sin^{1.0}(1.008 x_{0.0}) + 9.973\\cdot 10^{-1} \\end{eqnarray*}$\n", - "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} \\cdot cos^{1.0}(1.002 x_{0.0}) = -9.99\\cdot 10^{-1} u \\cdot sin^{1.0}(1.0 x_{0.0}) + 1.008 \\end{eqnarray*}$\n" + "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} = -1.165u + 3.735\\cdot 10^{-1} x_0 \\cdot cos^{1.0}(1.0 x_{0.0}) + 1.896\\cdot 10^{-1} x_0 + -1.21 \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} = -1.148u + 1.448\\cdot 10^{-2} (x_0)^{2.0} + 3.799\\cdot 10^{-1} x_0 \\cdot cos^{1.0}(1.0 x_{0.0}) + -6.791\\cdot 10^{-1} \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} = -1.686\\cdot 10^{-1} x_0 \\cdot sin^{1.0}(1.0 x_{0.0}) + -1.217\\cdot 10^{-1} \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} u \\cdot \\frac{\\partial u}{\\partial x_0} = 1.295\\frac{\\partial u}{\\partial x_0} \\cdot cos^{1.0}(9.998\\cdot 10^{-1} x_{0.0}) + 9.908\\cdot 10^{-1} u \\cdot cos^{1.0}(1.001 x_{0.0}) + -1.283 \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} \\cdot sin^{1.0}(9.998\\cdot 10^{-1} x_{0.0}) = 9.921\\cdot 10^{-1} u \\cdot cos^{1.0}(1.001 x_{0.0}) + -1.288 \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} u \\cdot \\frac{\\partial u}{\\partial x_0} = 0.0 \\end{eqnarray*}$\n" ] }, { "ename": "RuntimeError", "evalue": "Failed to process string with tex because latex could not be found", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mFileNotFoundError\u001b[0m                         Traceback (most recent call last)", "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\texmanager.py:233\u001b[0m, in \u001b[0;36mTexManager._run_checked_subprocess\u001b[1;34m(self, command, tex, cwd)\u001b[0m\n\u001b[0;32m    232\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 233\u001b[0m     report \u001b[38;5;241m=\u001b[39m \u001b[43msubprocess\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcheck_output\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    234\u001b[0m \u001b[43m    \u001b[49m\u001b[43mcommand\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcwd\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcwd\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;129;43;01mis\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mnot\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtexcache\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 235\u001b[0m \u001b[43m \u001b[49m\u001b[43mstderr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msubprocess\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mSTDOUT\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 236\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mFileNotFoundError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m exc:\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\subprocess.py:424\u001b[0m, in \u001b[0;36mcheck_output\u001b[1;34m(timeout, *popenargs, **kwargs)\u001b[0m\n\u001b[0;32m 422\u001b[0m kwargs[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124minput\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m empty\n\u001b[1;32m--> 424\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m run(\u001b[38;5;241m*\u001b[39mpopenargs, stdout\u001b[38;5;241m=\u001b[39mPIPE, timeout\u001b[38;5;241m=\u001b[39mtimeout, check\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[0;32m 425\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\u001b[38;5;241m.\u001b[39mstdout\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\subprocess.py:505\u001b[0m, in \u001b[0;36mrun\u001b[1;34m(input, capture_output, timeout, check, *popenargs, **kwargs)\u001b[0m\n\u001b[0;32m 503\u001b[0m kwargs[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mstderr\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m PIPE\n\u001b[1;32m--> 505\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m Popen(\u001b[38;5;241m*\u001b[39mpopenargs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;28;01mas\u001b[39;00m process:\n\u001b[0;32m 506\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\subprocess.py:951\u001b[0m, in \u001b[0;36mPopen.__init__\u001b[1;34m(self, args, bufsize, executable, stdin, stdout, stderr, preexec_fn, close_fds, shell, cwd, env, universal_newlines, startupinfo, creationflags, restore_signals, start_new_session, pass_fds, user, group, extra_groups, encoding, errors, text, umask)\u001b[0m\n\u001b[0;32m 948\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstderr \u001b[38;5;241m=\u001b[39m io\u001b[38;5;241m.\u001b[39mTextIOWrapper(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstderr,\n\u001b[0;32m 949\u001b[0m encoding\u001b[38;5;241m=\u001b[39mencoding, errors\u001b[38;5;241m=\u001b[39merrors)\n\u001b[1;32m--> 951\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execute_child\u001b[49m\u001b[43m(\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mexecutable\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpreexec_fn\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mclose_fds\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 952\u001b[0m \u001b[43m \u001b[49m\u001b[43mpass_fds\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcwd\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43menv\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 
953\u001b[0m \u001b[43m \u001b[49m\u001b[43mstartupinfo\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreationflags\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mshell\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 954\u001b[0m \u001b[43m \u001b[49m\u001b[43mp2cread\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mp2cwrite\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 955\u001b[0m \u001b[43m \u001b[49m\u001b[43mc2pread\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mc2pwrite\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 956\u001b[0m \u001b[43m \u001b[49m\u001b[43merrread\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43merrwrite\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 957\u001b[0m \u001b[43m \u001b[49m\u001b[43mrestore_signals\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 958\u001b[0m \u001b[43m \u001b[49m\u001b[43mgid\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgids\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43muid\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mumask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 959\u001b[0m \u001b[43m \u001b[49m\u001b[43mstart_new_session\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 960\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m:\n\u001b[0;32m 961\u001b[0m \u001b[38;5;66;03m# Cleanup if the child failed starting.\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\subprocess.py:1420\u001b[0m, in \u001b[0;36mPopen._execute_child\u001b[1;34m(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, unused_restore_signals, unused_gid, unused_gids, unused_uid, unused_umask, unused_start_new_session)\u001b[0m\n\u001b[0;32m 1419\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1420\u001b[0m hp, ht, pid, tid \u001b[38;5;241m=\u001b[39m \u001b[43m_winapi\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mCreateProcess\u001b[49m\u001b[43m(\u001b[49m\u001b[43mexecutable\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1421\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# no special security\u001b[39;49;00m\n\u001b[0;32m 1422\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 1423\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mint\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;129;43;01mnot\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mclose_fds\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1424\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreationflags\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1425\u001b[0m \u001b[43m \u001b[49m\u001b[43menv\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1426\u001b[0m \u001b[43m \u001b[49m\u001b[43mcwd\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1427\u001b[0m \u001b[43m \u001b[49m\u001b[43mstartupinfo\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1428\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 1429\u001b[0m \u001b[38;5;66;03m# Child is launched. Close the parent's copy of those pipe\u001b[39;00m\n\u001b[0;32m 1430\u001b[0m \u001b[38;5;66;03m# handles that only the child should have open. 
You need\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1433\u001b[0m \u001b[38;5;66;03m# pipe will not close when the child process exits and the\u001b[39;00m\n\u001b[0;32m 1434\u001b[0m \u001b[38;5;66;03m# ReadFile will hang.\u001b[39;00m\n", + "\u001b[1;31mFileNotFoundError\u001b[0m: [WinError 2] The system cannot find the file specified", + "\nThe above exception was the direct cause of the following exception:\n", + "\u001b[1;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python39\\site-packages\\IPython\\core\\formatters.py:340\u001b[0m, in \u001b[0;36mBaseFormatter.__call__\u001b[1;34m(self, obj)\u001b[0m\n\u001b[0;32m 338\u001b[0m \u001b[38;5;28;01mpass\u001b[39;00m\n\u001b[0;32m 339\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 340\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mprinter\u001b[49m\u001b[43m(\u001b[49m\u001b[43mobj\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 341\u001b[0m \u001b[38;5;66;03m# Finally look for special method names\u001b[39;00m\n\u001b[0;32m 342\u001b[0m method \u001b[38;5;241m=\u001b[39m get_real_method(obj, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mprint_method)\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python39\\site-packages\\IPython\\core\\pylabtools.py:152\u001b[0m, in \u001b[0;36mprint_figure\u001b[1;34m(fig, fmt, bbox_inches, base64, **kwargs)\u001b[0m\n\u001b[0;32m 149\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mmatplotlib\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mbackend_bases\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m FigureCanvasBase\n\u001b[0;32m 150\u001b[0m FigureCanvasBase(fig)\n\u001b[1;32m--> 152\u001b[0m fig\u001b[38;5;241m.\u001b[39mcanvas\u001b[38;5;241m.\u001b[39mprint_figure(bytes_io, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkw)\n\u001b[0;32m 153\u001b[0m data \u001b[38;5;241m=\u001b[39m bytes_io\u001b[38;5;241m.\u001b[39mgetvalue()\n\u001b[0;32m 154\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m fmt \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124msvg\u001b[39m\u001b[38;5;124m'\u001b[39m:\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\backend_bases.py:2295\u001b[0m, in \u001b[0;36mFigureCanvasBase.print_figure\u001b[1;34m(self, filename, dpi, facecolor, edgecolor, orientation, format, bbox_inches, pad_inches, bbox_extra_artists, backend, **kwargs)\u001b[0m\n\u001b[0;32m 2289\u001b[0m renderer \u001b[38;5;241m=\u001b[39m _get_renderer(\n\u001b[0;32m 2290\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfigure,\n\u001b[0;32m 2291\u001b[0m functools\u001b[38;5;241m.\u001b[39mpartial(\n\u001b[0;32m 2292\u001b[0m print_method, orientation\u001b[38;5;241m=\u001b[39morientation)\n\u001b[0;32m 2293\u001b[0m )\n\u001b[0;32m 2294\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mgetattr\u001b[39m(renderer, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m_draw_disabled\u001b[39m\u001b[38;5;124m\"\u001b[39m, nullcontext)():\n\u001b[1;32m-> 2295\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfigure\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdraw\u001b[49m\u001b[43m(\u001b[49m\u001b[43mrenderer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 2297\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m bbox_inches:\n\u001b[0;32m 2298\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m bbox_inches \u001b[38;5;241m==\u001b[39m 
\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtight\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\artist.py:74\u001b[0m, in \u001b[0;36m_finalize_rasterization..draw_wrapper\u001b[1;34m(artist, renderer, *args, **kwargs)\u001b[0m\n\u001b[0;32m 72\u001b[0m \u001b[38;5;129m@wraps\u001b[39m(draw)\n\u001b[0;32m 73\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mdraw_wrapper\u001b[39m(artist, renderer, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m---> 74\u001b[0m result \u001b[38;5;241m=\u001b[39m draw(artist, renderer, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 75\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m renderer\u001b[38;5;241m.\u001b[39m_rasterizing:\n\u001b[0;32m 76\u001b[0m renderer\u001b[38;5;241m.\u001b[39mstop_rasterizing()\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\artist.py:51\u001b[0m, in \u001b[0;36mallow_rasterization..draw_wrapper\u001b[1;34m(artist, renderer)\u001b[0m\n\u001b[0;32m 48\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m artist\u001b[38;5;241m.\u001b[39mget_agg_filter() \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 49\u001b[0m renderer\u001b[38;5;241m.\u001b[39mstart_filter()\n\u001b[1;32m---> 51\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mdraw\u001b[49m\u001b[43m(\u001b[49m\u001b[43martist\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrenderer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 52\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 53\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m artist\u001b[38;5;241m.\u001b[39mget_agg_filter() \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\figure.py:2845\u001b[0m, in \u001b[0;36mFigure.draw\u001b[1;34m(self, renderer)\u001b[0m\n\u001b[0;32m 2842\u001b[0m \u001b[38;5;66;03m# ValueError can occur when resizing a window.\u001b[39;00m\n\u001b[0;32m 2844\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpatch\u001b[38;5;241m.\u001b[39mdraw(renderer)\n\u001b[1;32m-> 2845\u001b[0m \u001b[43mmimage\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_draw_list_compositing_images\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 2846\u001b[0m \u001b[43m \u001b[49m\u001b[43mrenderer\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43martists\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msuppressComposite\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 2848\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m sfig \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msubfigs:\n\u001b[0;32m 2849\u001b[0m sfig\u001b[38;5;241m.\u001b[39mdraw(renderer)\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\image.py:132\u001b[0m, in \u001b[0;36m_draw_list_compositing_images\u001b[1;34m(renderer, parent, artists, suppress_composite)\u001b[0m\n\u001b[0;32m 130\u001b[0m 
\u001b[38;5;28;01mif\u001b[39;00m not_composite \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m has_images:\n\u001b[0;32m 131\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m a \u001b[38;5;129;01min\u001b[39;00m artists:\n\u001b[1;32m--> 132\u001b[0m \u001b[43ma\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdraw\u001b[49m\u001b[43m(\u001b[49m\u001b[43mrenderer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 133\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 134\u001b[0m \u001b[38;5;66;03m# Composite any adjacent images together\u001b[39;00m\n\u001b[0;32m 135\u001b[0m image_group \u001b[38;5;241m=\u001b[39m []\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\artist.py:51\u001b[0m, in \u001b[0;36mallow_rasterization..draw_wrapper\u001b[1;34m(artist, renderer)\u001b[0m\n\u001b[0;32m 48\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m artist\u001b[38;5;241m.\u001b[39mget_agg_filter() \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 49\u001b[0m renderer\u001b[38;5;241m.\u001b[39mstart_filter()\n\u001b[1;32m---> 51\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mdraw\u001b[49m\u001b[43m(\u001b[49m\u001b[43martist\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrenderer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 52\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 53\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m artist\u001b[38;5;241m.\u001b[39mget_agg_filter() \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\axes\\_base.py:3091\u001b[0m, in \u001b[0;36m_AxesBase.draw\u001b[1;34m(self, renderer)\u001b[0m\n\u001b[0;32m 3088\u001b[0m a\u001b[38;5;241m.\u001b[39mdraw(renderer)\n\u001b[0;32m 3089\u001b[0m renderer\u001b[38;5;241m.\u001b[39mstop_rasterizing()\n\u001b[1;32m-> 3091\u001b[0m \u001b[43mmimage\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_draw_list_compositing_images\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 3092\u001b[0m \u001b[43m \u001b[49m\u001b[43mrenderer\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43martists\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfigure\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msuppressComposite\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 3094\u001b[0m renderer\u001b[38;5;241m.\u001b[39mclose_group(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124maxes\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m 3095\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstale \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\image.py:132\u001b[0m, in \u001b[0;36m_draw_list_compositing_images\u001b[1;34m(renderer, parent, artists, suppress_composite)\u001b[0m\n\u001b[0;32m 130\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m not_composite \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m has_images:\n\u001b[0;32m 131\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m a \u001b[38;5;129;01min\u001b[39;00m artists:\n\u001b[1;32m--> 132\u001b[0m 
\u001b[43ma\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdraw\u001b[49m\u001b[43m(\u001b[49m\u001b[43mrenderer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 133\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 134\u001b[0m \u001b[38;5;66;03m# Composite any adjacent images together\u001b[39;00m\n\u001b[0;32m 135\u001b[0m image_group \u001b[38;5;241m=\u001b[39m []\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\artist.py:51\u001b[0m, in \u001b[0;36mallow_rasterization..draw_wrapper\u001b[1;34m(artist, renderer)\u001b[0m\n\u001b[0;32m 48\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m artist\u001b[38;5;241m.\u001b[39mget_agg_filter() \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 49\u001b[0m renderer\u001b[38;5;241m.\u001b[39mstart_filter()\n\u001b[1;32m---> 51\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mdraw\u001b[49m\u001b[43m(\u001b[49m\u001b[43martist\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrenderer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 52\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 53\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m artist\u001b[38;5;241m.\u001b[39mget_agg_filter() \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\axis.py:1159\u001b[0m, in \u001b[0;36mAxis.draw\u001b[1;34m(self, renderer, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1156\u001b[0m renderer\u001b[38;5;241m.\u001b[39mopen_group(\u001b[38;5;18m__name__\u001b[39m, gid\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mget_gid())\n\u001b[0;32m 1158\u001b[0m ticks_to_draw \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_update_ticks()\n\u001b[1;32m-> 1159\u001b[0m ticklabelBoxes, ticklabelBoxes2 \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_get_tick_bboxes\u001b[49m\u001b[43m(\u001b[49m\u001b[43mticks_to_draw\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1160\u001b[0m \u001b[43m \u001b[49m\u001b[43mrenderer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1162\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m tick \u001b[38;5;129;01min\u001b[39;00m ticks_to_draw:\n\u001b[0;32m 1163\u001b[0m tick\u001b[38;5;241m.\u001b[39mdraw(renderer)\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\axis.py:1085\u001b[0m, in \u001b[0;36mAxis._get_tick_bboxes\u001b[1;34m(self, ticks, renderer)\u001b[0m\n\u001b[0;32m 1083\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_get_tick_bboxes\u001b[39m(\u001b[38;5;28mself\u001b[39m, ticks, renderer):\n\u001b[0;32m 1084\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Return lists of bboxes for ticks' label1's and label2's.\"\"\"\u001b[39;00m\n\u001b[1;32m-> 1085\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ([tick\u001b[38;5;241m.\u001b[39mlabel1\u001b[38;5;241m.\u001b[39mget_window_extent(renderer)\n\u001b[0;32m 1086\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m tick \u001b[38;5;129;01min\u001b[39;00m ticks \u001b[38;5;28;01mif\u001b[39;00m tick\u001b[38;5;241m.\u001b[39mlabel1\u001b[38;5;241m.\u001b[39mget_visible()],\n\u001b[0;32m 1087\u001b[0m 
[tick\u001b[38;5;241m.\u001b[39mlabel2\u001b[38;5;241m.\u001b[39mget_window_extent(renderer)\n\u001b[0;32m 1088\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m tick \u001b[38;5;129;01min\u001b[39;00m ticks \u001b[38;5;28;01mif\u001b[39;00m tick\u001b[38;5;241m.\u001b[39mlabel2\u001b[38;5;241m.\u001b[39mget_visible()])\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\axis.py:1085\u001b[0m, in \u001b[0;36m\u001b[1;34m(.0)\u001b[0m\n\u001b[0;32m 1083\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_get_tick_bboxes\u001b[39m(\u001b[38;5;28mself\u001b[39m, ticks, renderer):\n\u001b[0;32m 1084\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Return lists of bboxes for ticks' label1's and label2's.\"\"\"\u001b[39;00m\n\u001b[1;32m-> 1085\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ([\u001b[43mtick\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlabel1\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_window_extent\u001b[49m\u001b[43m(\u001b[49m\u001b[43mrenderer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1086\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m tick \u001b[38;5;129;01min\u001b[39;00m ticks \u001b[38;5;28;01mif\u001b[39;00m tick\u001b[38;5;241m.\u001b[39mlabel1\u001b[38;5;241m.\u001b[39mget_visible()],\n\u001b[0;32m 1087\u001b[0m [tick\u001b[38;5;241m.\u001b[39mlabel2\u001b[38;5;241m.\u001b[39mget_window_extent(renderer)\n\u001b[0;32m 1088\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m tick \u001b[38;5;129;01min\u001b[39;00m ticks \u001b[38;5;28;01mif\u001b[39;00m tick\u001b[38;5;241m.\u001b[39mlabel2\u001b[38;5;241m.\u001b[39mget_visible()])\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\text.py:910\u001b[0m, in \u001b[0;36mText.get_window_extent\u001b[1;34m(self, renderer, dpi)\u001b[0m\n\u001b[0;32m 907\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mCannot get window extent w/o renderer\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m 909\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m cbook\u001b[38;5;241m.\u001b[39m_setattr_cm(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfigure, dpi\u001b[38;5;241m=\u001b[39mdpi):\n\u001b[1;32m--> 910\u001b[0m bbox, info, descent \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_get_layout\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_renderer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 911\u001b[0m x, y \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mget_unitless_position()\n\u001b[0;32m 912\u001b[0m x, y \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mget_transform()\u001b[38;5;241m.\u001b[39mtransform((x, y))\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\text.py:309\u001b[0m, in \u001b[0;36mText._get_layout\u001b[1;34m(self, renderer)\u001b[0m\n\u001b[0;32m 306\u001b[0m ys \u001b[38;5;241m=\u001b[39m []\n\u001b[0;32m 308\u001b[0m \u001b[38;5;66;03m# Full vertical extent of font, including ascenders and descenders:\u001b[39;00m\n\u001b[1;32m--> 309\u001b[0m _, lp_h, lp_d \u001b[38;5;241m=\u001b[39m 
\u001b[43mrenderer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_text_width_height_descent\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 310\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlp\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_fontproperties\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 311\u001b[0m \u001b[43m \u001b[49m\u001b[43mismath\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mTeX\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_usetex\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m)\u001b[49m\n\u001b[0;32m 312\u001b[0m min_dy \u001b[38;5;241m=\u001b[39m (lp_h \u001b[38;5;241m-\u001b[39m lp_d) \u001b[38;5;241m*\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_linespacing\n\u001b[0;32m 314\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i, line \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28menumerate\u001b[39m(lines):\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\backends\\backend_agg.py:259\u001b[0m, in \u001b[0;36mRendererAgg.get_text_width_height_descent\u001b[1;34m(self, s, prop, ismath)\u001b[0m\n\u001b[0;32m 257\u001b[0m texmanager \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mget_texmanager()\n\u001b[0;32m 258\u001b[0m fontsize \u001b[38;5;241m=\u001b[39m prop\u001b[38;5;241m.\u001b[39mget_size_in_points()\n\u001b[1;32m--> 259\u001b[0m w, h, d \u001b[38;5;241m=\u001b[39m \u001b[43mtexmanager\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_text_width_height_descent\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m \u001b[49m\u001b[43ms\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfontsize\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrenderer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m w, h, d\n\u001b[0;32m 263\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m ismath:\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\texmanager.py:335\u001b[0m, in \u001b[0;36mTexManager.get_text_width_height_descent\u001b[1;34m(self, tex, fontsize, renderer)\u001b[0m\n\u001b[0;32m 333\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m tex\u001b[38;5;241m.\u001b[39mstrip() \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m'\u001b[39m:\n\u001b[0;32m 334\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;241m0\u001b[39m, \u001b[38;5;241m0\u001b[39m, \u001b[38;5;241m0\u001b[39m\n\u001b[1;32m--> 335\u001b[0m dvifile \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmake_dvi\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtex\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfontsize\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 336\u001b[0m dpi_fraction \u001b[38;5;241m=\u001b[39m 
renderer\u001b[38;5;241m.\u001b[39mpoints_to_pixels(\u001b[38;5;241m1.\u001b[39m) \u001b[38;5;28;01mif\u001b[39;00m renderer \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m 337\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m dviread\u001b[38;5;241m.\u001b[39mDvi(dvifile, \u001b[38;5;241m72\u001b[39m \u001b[38;5;241m*\u001b[39m dpi_fraction) \u001b[38;5;28;01mas\u001b[39;00m dvi:\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\texmanager.py:271\u001b[0m, in \u001b[0;36mTexManager.make_dvi\u001b[1;34m(self, tex, fontsize)\u001b[0m\n\u001b[0;32m 262\u001b[0m \u001b[38;5;66;03m# Generate the dvi in a temporary directory to avoid race\u001b[39;00m\n\u001b[0;32m 263\u001b[0m \u001b[38;5;66;03m# conditions e.g. if multiple processes try to process the same tex\u001b[39;00m\n\u001b[0;32m 264\u001b[0m \u001b[38;5;66;03m# string at the same time. Having tmpdir be a subdirectory of the\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 268\u001b[0m \u001b[38;5;66;03m# the absolute path may contain characters (e.g. ~) that TeX does\u001b[39;00m\n\u001b[0;32m 269\u001b[0m \u001b[38;5;66;03m# not support.)\u001b[39;00m\n\u001b[0;32m 270\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m TemporaryDirectory(\u001b[38;5;28mdir\u001b[39m\u001b[38;5;241m=\u001b[39mPath(dvifile)\u001b[38;5;241m.\u001b[39mparent) \u001b[38;5;28;01mas\u001b[39;00m tmpdir:\n\u001b[1;32m--> 271\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_run_checked_subprocess\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 272\u001b[0m \u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlatex\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m-interaction=nonstopmode\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m--halt-on-error\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 273\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43mf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m../\u001b[39;49m\u001b[38;5;132;43;01m{\u001b[39;49;00m\u001b[43mtexfile\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mname\u001b[49m\u001b[38;5;132;43;01m}\u001b[39;49;00m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtex\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcwd\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtmpdir\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 274\u001b[0m (Path(tmpdir) \u001b[38;5;241m/\u001b[39m Path(dvifile)\u001b[38;5;241m.\u001b[39mname)\u001b[38;5;241m.\u001b[39mreplace(dvifile)\n\u001b[0;32m 275\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m dvifile\n", + "File \u001b[1;32mc:\\Users\\Mike\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\matplotlib\\texmanager.py:237\u001b[0m, in \u001b[0;36mTexManager._run_checked_subprocess\u001b[1;34m(self, command, tex, cwd)\u001b[0m\n\u001b[0;32m 233\u001b[0m report \u001b[38;5;241m=\u001b[39m subprocess\u001b[38;5;241m.\u001b[39mcheck_output(\n\u001b[0;32m 234\u001b[0m command, cwd\u001b[38;5;241m=\u001b[39mcwd \u001b[38;5;28;01mif\u001b[39;00m cwd \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01melse\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtexcache,\n\u001b[0;32m 235\u001b[0m stderr\u001b[38;5;241m=\u001b[39msubprocess\u001b[38;5;241m.\u001b[39mSTDOUT)\n\u001b[0;32m 236\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mFileNotFoundError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m exc:\n\u001b[1;32m--> 237\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 238\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mFailed to process string with tex because \u001b[39m\u001b[38;5;132;01m{}\u001b[39;00m\u001b[38;5;124m could not be \u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[0;32m 239\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mfound\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mformat(command[\u001b[38;5;241m0\u001b[39m])) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mexc\u001b[39;00m\n\u001b[0;32m 240\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m subprocess\u001b[38;5;241m.\u001b[39mCalledProcessError \u001b[38;5;28;01mas\u001b[39;00m exc:\n\u001b[0;32m 241\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 242\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{prog}\u001b[39;00m\u001b[38;5;124m was not able to process the following string:\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m'\u001b[39m\n\u001b[0;32m 243\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{tex!r}\u001b[39;00m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 247\u001b[0m tex\u001b[38;5;241m=\u001b[39mtex\u001b[38;5;241m.\u001b[39mencode(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124municode_escape\u001b[39m\u001b[38;5;124m'\u001b[39m),\n\u001b[0;32m 248\u001b[0m exc\u001b[38;5;241m=\u001b[39mexc\u001b[38;5;241m.\u001b[39moutput\u001b[38;5;241m.\u001b[39mdecode(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mutf-8\u001b[39m\u001b[38;5;124m'\u001b[39m))) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mexc\u001b[39;00m\n", + "\u001b[1;31mRuntimeError\u001b[0m: Failed to process string with tex because latex could not be found" ] }, { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA8oAAAGyCAYAAAA8gT2xAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAABXTElEQVR4nO3df4wj933f/9dKXhwsQbezvP7jREJ9s1X/K9Bw9/pHEQeJb6gACnRBZXIXCOB/Eh+J9p8ELsrx5p9G/5Qimz/iv2LynPwjIOkux4YhIwJ8HPkL1EaBYklWRf9UOacisf2Plzu3hpyeFhK/f2xnPMPfy+WQ3N3nAzhInJ+f/czP93x+rfV6vZ4AAAAAAIAk6bllJwAAAAAAgFVCoAwAAAAAQASBMgAAAAAAEQTKAAAAAABEECgDAAAAABBBoAwAAAAAQASBMgAAAAAAEQTKAAAAAABEfG7ZCVi0zz77TD/96U/10ksvaW1tbdnJAQAAU+j1evrFL36hX/u1X9Nzz/GdHwCQrBsXKP/0pz/VK6+8suxkAACAGfz93/+9Xn755WUnAwBwzd24QPmll16SdP6gvX37diL7ODs70+PHj/Xaa69pfX09kX3cdORx8sjj5JHHySOPk7XI/D09PdUrr7wSPscBAEjSjQuUg+rWt2/fTjRQfuGFF3T79m1ezBJCHiePPE4eeZw88jhZy8hfmk0BABaBRj4AAAAAAEQQKAMAAAAAEEGgDAAAAABABIEyAAAAAAARBMoAAAAAAESsVKCcyWQmLuO6rnK5nGq1mlzXlW3bchxnAakDAAAAANwEKzM8lOM4cl134nK+78t1XTmOI9M0Zdu2stnsAlIIAAAAALgJViJQ9n1f3W536uWfPHkiwzCSSxAAAAAA4MZa6/V6vWUnolaraXd3V5ubm5qUHMdxZFnWzIHy6empNjY29PTpU92+fXumbUxydnam9957T6+//rrW19cT2cc4X/3qV/X06dOF73eRer2ePv74Y7344otaW1tbdnKuJfI4eeRx8sjjZC0yfz/77DN9+OGHevXVV/XccyvVcgwAcA1sbGzonXfeCX8vvUTZdV1ZlnWhdQ4PD5VKpdTtdtXpdFQul0cu++zZMz179iz8fXp6Kuk8mD07O5st0RME201q+5P4vq/vf//7S9k3AAAAAFw1Dx48iP1eeqDs+75M05Tv+1Mtn06nJUmmaUo6L43O5XKq1+tDly+VSnrrrbcGpj9+/FgvvPDCbImeUqPRSHT7o3z88cdL2S8AAAAAXAdLrXpdq9WUz+clnQfM01S97hesd3JyMrQ69rAS5VdeeUU///nPE6163Wg0lMlkllL1+s0336REGQAAAACm9ODBA7377rvh76WVKLfbbe3s7Fx4PcdxYr1cB8Gx53lhaXPUrVu3dOvWrYHp6+vriQexi9jHMBdtJxb0JJ5Op8OSegAAAAC4qZYWKHe7XbXb7XBIqE6nI0mqVCoyTXPokE++7yuXy6nT6YQBXVBlmwBvNq7ryvd9WZalZrOpcrmsarW67GQBAAAAwNIsLVC2LCvWiVe73VatVlOxWAyneZ4nx3HCaYZhqFgsxoLiWq2mbDbLcFEz8H1fjUYj7AzNsiz5vi/btsd2kAYAAAAA19lKjK/gOI5KpZIkybbtsJTZdd2B0s39/X1VKpXw3/Hx8ciOvDCe67phXgcsy1KtVltSigAAAABg+VZiHOVFugnjKPc3RL+otbW1kZ2jAQAAAMB1szKdeWE11Go1pVIpSeftxoPq8N1uV81mU77vq9vtxnonv3//vlqt1tLSDAAAAABJIlC+wTKZjMrlcqy38EKhEP5/KpWSaZrKZDJhoOy6bhhYAwAAAMB1tBJtlLF4tm0rnU4PDKnVbDZlWVY43JbjOLFlgvGhAQAAAOC6IlC+oSqVivb29game56nQqEQVsE+ODiIlTI3m82h41UDAAAAwHVBoHwDBT1d9we87XZbvu+HY1j7vq92uz0wjFf0NwAAAABcN7RRvqGiY1EHSqVSbDguz/Niy7Xb7fC367pXImD2fT8c7io6RneU4ziSzjswM03zSvxdQJTjODJNU81mU5LCPgVwdV2nY8p9eLmmyX/cbFyjuK4ue/8jUL6BLMtSt9uNTQtugNGXMcMwYkNEHRwchKXQnucln9A5cF1Xx8fHunPnztD5nuep0WiEHwgymQw3f1wpvu+rVCqp1WrJNE1tbm5e6aAK1++Ych9erkn5D3CN4rq67P2Pqtc3VKvVkm3bqlQqqlQq6na7qtfrsWVM09TOzo4qlYocxwnbNNdqtSvz0pbNZrW1tTVyvuu6sY8BhmGEVdOH8X1/jqm7em7637+KDMMIh2vzPI+Xl2vguh3Ted6HuQdd3KT8TxrHbPVxjc7PTf/7V81l73+UKN9QpmmqXC5PXC5aFVvSQDB91XU6ndhXplQqNfImZ9v2VHl2ndVqNWWz2aFV97FctVpNjUbj2l2jN9lNOabT3oe5B19NPDcWJ7huokHtPHCNTo/z/XohUAb69FdLl85vfNHev6XzB9Lh4aHq9boajcZU265UKuEDzPf9gfYS4+bncjnt7e3JNM2Bh+CibsjFYlGFQmHgA0qSarWafN+XYRjqdDra39+f+BIwaZ1J8yuViqTzlwNp8IPRRU06VyadF9PI5/MyTVO2bS/0+MzKtu3wK28qlQo7ERxmmnM/OGaSdHx8PPCyNst5NA7HNFn99+Fh9+BJx3yYcetc9B6byWSmvvePs4hzaZmW8dy4iCSe5cvy8OFD7e3tjb2fzss01+ii35OCdMzzXn9RizzfF30PDI6ndP5+5HmeHj16tPLP0ssgUMaNtrW1FfsqGnRSEeV5nlqtVqy6ebvdVrPZlO/7QwPrYYKbU7Ad13VjN9NJ89vtdtiWPCqbzS60xCmXy6lSqSzkZlWpVJTP52M3yYcPH479eyetM2l+/xfxQqFwqRfiSefKNOfF8fHxwHp37twJj0HwUmBZlnK5nHK53EKr6/Z3/DeO7/u6f/++3n//fRmGoXa7re3tbfV6vZHrTDr3c7mcMplMmIe1Wi12HGc5j8a5Ccd0kSbdh4fdgycd82EmrXORe6zjOGOb6UxrEefSKpj1uXGRe8sskniWL1NS7wKzXKPLeE+a971+Vos435dxD7RtW7Zth2ksFArK5XIr/X50ab0b5unTpz1JvadPnya2j08++aT3ve99r/fJJ58kto9x3njjjaXsd1VVq9VeuVyOTTs5Oen1er1ep9PpZbPZcHo6nR5Yv1gs9jqdztBt1+v1oesMYxhGuN9A9BKcNL//b+j1zv+2ZZj2b74sy7KmmnaRdcbNPzk56VmWFTsOrVarJ2nkOTCtUefKpOM+SbVa7RWLxfC3aZq9Vqs1czpnkc/nL7Rs/7ncaDTGrjPu3O90Oj1JsTw8OTmJTZvlPJrGdT6m83aZ+3D/PXiaY95vmnWmvceenJz0qtXqhY7pJEmdS4Fh+b9oszw3LnJvuYx5Psuvqnleo1GLfE9K6l4/iyTP92XdAy3Lii1TLpd7hmFMle
ZxVun+1x9D0ZkXVkbQaZjrugNftCqVimq1Wvgvyvf92LrRr/yu66rRaKjRaMS2ub29Ld/3ZZqm9vb25DiOarWa9vf3B9Lluu6lv2h7nheWEA3b/qT5kgaqUrmuq52dndg0x3HkOI4KhYI8zwu/Fs67l3LTNNVut+e6zWEMw1Amkwm/ZE/ztXXSOpPmN5vNWH4F85LooGOa4z7J7u6u7t27J9d1Zdu2CoXCwBjpizovphG03/I8L/wbJ5WUjjv3g7+hv6MZSeHQSrOcR7NaxDEN7pHRe6Hv+9re3r50+lfxPtx/D57mmPebZp1p7rGSdHh4qN3d3aH7mad5nEvBssPyP3DdnhtJmuaYJHV9TrrG+vcpKXYsg2Vs2x5Ybt7X6Czm8Z40zb3+Opzvy7oHNhqNWEnt0dFRYjWdFnX/m+jCYfkVR4ny6jk5Oeml0+nwa2Sj0Yh9WbIsK/alMp1Ox0pX8vl8OL//q+dldTqdsV8jp/1S2mg0hn4FMwyjV6/XJ84flq7+ko5qtRp+ecvn82E+WJY1dBuXsajSiZOTk55pmj1JvWKxOFUJ+qR1LrrNer0+9ivttIadKxc97rNYxHlx0a/g9Xq912q1ep1Op5fP5yeWKPdvI3rMhn0l7/XOvzoHy81yHk1jGce00+mEeWeaZiwtlyk5WdX78LB78DTHfNh2LrLOsHtsr3eeL51OJyyJmZfrfH+I7uuiz41VK1GedEySuj57vdHX2Kh9BtPK5XLs2g7SOy+r9J406V5/Xc73Zd4DA8E5fdl3o2Bby7j/DdMfQ9FGGUtn23bYeYAk7ezsxNqLptPp2BfBnZ0dua4blrA0m82wjYVpmnNtixJ8SU1KKpVSt9sd2RFCML9fuVweaA+VSqXC7XieF3aqEW07EnxNC9oXDfsSOM0yqVQq7OhqlP5OPUbZ3t4eOdyYYRiybVuNRkOVSkWWZWl3d3dsxxGT1rnoNkulkqrV6kI7Axl13Gfd1jzOi3mIftEOrt9yuay7d+/q5ORkqm30n/tBel3XDb+G939tnuU8mrd5HdNguKhKpRIrZW40GspkMjNvd1Xvw8PuwdMc834XXWfYPTaaniRqmExrFe8P83puXFXBMUnq+pRGX2PBPmu1Wmyf3W5X6XRanU5HlmUlVjtqld6TJt3rr8v5vux74OHhoXzfVy6XW+hzVJrv/W8aBMo3xNra2kL20xvSIU9Q1WfYDcP3fdVqtdg8wzDCC69SqYTjiQY8z4uNiVYul5XL5bS1taV0Oj2w/GV4npfoTWDSxT5s/qiqPNEqM81mc+BF1fM8NRqN8MaXyWQGbu7TLCOd33APDg7Gpn0eHZvYtq1MJqN6vS7P85TL5bS9vT324TNpnYtsMwgexo0bPu78ntU8HwLzOC+ihn0ACTrYiMpkMiN7Xo1W5TIMQ77vy3XdiQH6qHO/0WjItm11u12lUqnwpS3470XPo1U+pkEeHRwcxDptaTabyuVyI9e7qvfhUffgScd8mGnXGXWe1Wq1sfeCfkmcR9Lq3R/m9dy47L3FcZyJzyVJ2t/fH2ieclnBMUni+gyMusaCfVar1dg+g+mu68aO66gmBbNapfekSff663S+L+MeKJ0/G6KdgW1uburJkydDz4FVfpZOi0D5hhgWwC6KaZojH0rNZlOGYQy9sNvtdqzkKeC67sDD4OTkRO12W7lcTo7jLGRohIsYdeMKvsROmh9VrVbHDp4+rB2KdJ5v/W1T+oOTaZaRFN5kkxS0Twn2bZqmWq2Wtre3Rx7jSeuk0+mpt+k4jra2tia+GI87vye5yHG/rMucF1HDPoBM2+PrqL/JMIyp2oeNO/ej94SgxGRnZ2em82jVj6nv+2q327Fj1P97WLqu23141DG/7DrDzrN2u33h4OIy51Gw/jCrdn+Y13PjMvcW6TwISvqcm+aYzPv6DIy7xjzPC0uW+9PleV5s2wcHB2OD9mW57HvSRe711+F8lxZ7D/R9X6VSKTbclmVZ4Yfuq/gsnQadeSFx6XR6bDW8YSd80FV8/00mCHaCC29rayu84aXTaRUKhbl+2QxuvvPYzqhgIKgSNW5+VP9Nut+wm7p0PubdnTt3wumpVGqgCtY0y0jnx2dcsC6d3/Cn+dffqUhg1FfqcVW6J60z7TaDPAuC5OBlY5hJ5/c4Fznul3WZ82Jegped/r/X9/2pgpBR537/1+/goR3k7UXPo1U/pv0d1LTb7fD3qKp0V/U+POoePO6YjzLtOsPOs263K9d1ValUVKlUZNu2pF91fjbMZc4j6ercH+b53Fh10xyTJK7PSdeY4zhhB3PRfTSbzaHvELu7u2q323O516/Ke9JF7vXX4Xxf9D3Q8zxVKpVYiW7wd4/a56o/S6dBoHxD+b4vx3GW0uttVHBziwp6Z7Qsa6CKRbVaDS863/eVzWZjD6R598A36QEwqgpIcEOJ2t/fjz3AHMeJlVZOmh/d9rBeHIP2T41GI3yx9X1/7INwmiosw5aZpqpVtVqd6t+oElvLsoY+yFutVuwrejSfJ60zzTbb7bba7bbS6XT4lb5Wq126BH1UXk973GeR5Hkxq3K5HKuO5jiOLMsKg65h105gVG/VuVwulofRKojTHPNZLeOYSvFq0dJ5CVE0/y5qle/Do+7B4455kP7+82jSOtF1+88zy7JULBbDf8HLd7FYnEsp5nW7P8z63FiWeT7L5319TnONHR0dhb09R59VjUYjVqLneV7YTjeoSXJZq/KeNOlef93O90XfA9PptIrFYmx6cG5f9n6/rGfpNKh6fQO5rhtWT2k2myMb7C9KvV4PO74IqlQELx7BvOBGXK/XwxuPYRi6d+9erFOFR48ezTVthmEMDZA8zwvbQ7Xbbdm2rXv37sU6SKhWq7Fu9IvFYqz04ejoKJbvk+YHTNMcSJNpmspkMnIcR48ePZJt2+F2gjRtbW3FHgZBJxRR0ywTpG3eeT1MvV5XqVTSnTt3ZPy/tqzRm/qwfJ60zrj5vu/r/v378n0/LDEKzDp4/aRzZdrjPot5nRfzlM1m1e12wwf48fFxrCOVYcc0+vcMux6r1ara7bY8z1On01G1Wo39DZPOiYta5jGVzvNhZ2cnvDfu7e2pVCpduA1t1Kreh0fdgycd82Hn0aR1AqPOs0C0LWzQJnLWF8XrcH9YtefGRSTxLJ/39TnNNba3t6dutztQguz7fqxENdqh07w+Zq3Se9K4e/11O9+XcQ/c39+PBd++7+v999+f+W9Y9rN0Kon1r72ibvrwUCcnJ71isRibVq/XB6bhV8rl8oWGr1lV/UO2RLviD7r3H7dM1DyH4MJyTXvMx1nUEC64ma7LPfgqWvZzg3vL1XBdrlHOd/THUGu93hJ7eVqC09NTbWxs6OnTp7p9+3Yi+zg7O9N7772n119/Xevr64nsY5wHDx7o3XffHTrPcRyVSqVYj6S+719oeJabKJfLzXXYqWWJfpFOpVKxr6itVkuGYYxcJhCUOi2yjQiSNemYTzKqSjQwL9flHnwVLfO5wb3l6rgu1yjn+83WH0MRKCdglQPlUdbW1
nRycrKy7YeWrb96zk0V9Hp4mWqrAHBR3IOvLp4bNwPX6DnO96utP4aiM68bqFaryXEcOY6jWq0WdsIQNKYP5hUKhbAjI9u2l97x1zJFO4+6yWq1Gjd/AAvHPfjq4rlxM3CNnuN8v17ozOuGyWQyKpfLsV4Qg44eTNNUrVbT7u6uDMMIByav1+vKZDKxIQ5uopv+lVSavUMrALgs7sFXE8+Nm4NrlPP9uiFQvkFs246NfRmI9pIYDBsgnX8VDILoaK+00bYZQQ+KAAAAAHBdECjfIJVKJdaJV8DzPO3v70uKfw1sNpsDHTN4nqdGoxF2z36ZYTEAAAAAYBXRRvmGCAbs7i9NDgZn768uE7Qx6e/cy3Xd2DTDMGKDgQMAAADAVUegfIMMa19cKpWGDt7dPxh9EAx3Oh3duXMnnJ5KpWIDrwMAAADAVUegfENYlhX2ah0I2hrn8/nwdyaTkXTeJjmVSkk67+p+XDDcv10AAAAAuMpoo3yDtFot2bYdlggbhhFrg2yapjKZjBzH0aNHj2Tb9sC4eFtbW7GgOejQCwAAAACuCwLlG8Q0zbFju/X3iD2sSrZlWbJtO/zteR6deQEAAAC4VgiUcSGmaWpvb0+O46jb7Ya9ZQMAAADAdUGgjAtjQHkAAAAA1xmdeQEAAAAAEEGgDAAAAABABIEyAAAAAAARBMoAAAAAAETQmdc1tLGxoQcPHiw7GYnq9Xr6+OOP9eKLL2ptbW3ZybmWyOPkkcfJI4+Ttcj8/eyzz/Thhx/q1Vdf1XPP8Z0fADBfGxsbsd8EytfQO++8s+wkJO7s7EzvvfeeXn/9da2vry87OdcSeZw88jh55HGyFpm/p6en2tjY0NHRkW7fvp3ovgAA4JMsAAAAAAARBMoAAAAAAEQQKAMAAAAAEEGgDAAAAABABIEyAAAAAAARBMoAAAAAAEQQKAMAAAAAEEGgDAAAAABABIEyAAAAAAARBMoAAAAAAEQQKAMAAAAAEEGgDAAAAABABIEyAAAAAAARBMoAAAAAAER8btkJiMpkMmo0GhOXq1QqMgxDkuT7vorFYsIpu6BPPz3/r+NIX/iC9KUvSc8/v9w0AQAAAACmsjIlyo7jyHXdictVKhVJUj6fVz6fVzqdVqFQSDp50/vud6V/8S/O//+P/kj6nd+RvvjF8+kAAAAAgJW3EoGy7/vqdrtTLVsqlZTP58PflmWpVqsllbSL+e53pWxW+slP4tN/8pPz6QTLAAAAALDyViJQPjw81O7u7sTlPM+T7/thteuoaUqjE/Xpp9If/7HU6w3OC6b9yZ/8qlo2AAAAAGAlLb2Nsuu6sixrqmU9zxs63TAM+b4/dN6zZ8/07Nmz8Pfp6akk6ezsTGdnZxdL7Dg//rF0fCx9/vM6+/znz/fx//4b+vnPpf/6X6Xf/M357feGCo7dXI8hYsjj5JHHySOPk7XI/OUYAgAWaemBsu/7Mk1zZKA7jVQqNbLqdqlU0ltvvTUw/fHjx3rhhRdm3udQf/u3sZ+Nv/7rwWVOT6X33pvvfm+waTp/w+WQx8kjj5NHHidrEfn7y1/+MvF9AAAQWGqgXKvVYu2NZzWuffP+/r6+/vWvh79PT0/1yiuv6LXXXtPt27cvve/Qj38s/d7vSTovSW789V8r84d/qPV//Mf4cn/3d5Qoz8HZ2ZkajYYymYzW19eXnZxriTxOHnmcPPI4WYvM36BGGAAAi7C0QLndbmtnZ+dC65imOXR6UCo9zK1bt3Tr1q2B6evr6/N9qP/Wb0l37sQ68lr/x3/8VaC8tia9/PL5cgwVNTdzP44YQB4njzxOHnmcrEXkL8cPALBISwuUu92u2u122AlXp9ORdD78k2maymazA+uYpinDMOR53kBgPG0758Q8/7z0zW+e9269thafF/z+i78gSAYAAACAFbe0QNmyrFhw2263VavVVCwWw2me58lxnNi0/f19ua4bVtl2HGcu1bfn4s03JceRbDs+/eWXz4PkN99cSrIAAAAAANNbieGhHMdRqVSSJNm2HZYyu66rarUaW7ZYLMr3fTmOI8dxdHR0NLDMUr35pvS//tf5///VX0n/3/8nPXlCkAwAAAAAV8TSe72WpGw2O7SqdT6fH1paHC1hHrbe0gXVq7NZiTZVAAAAAHClrESJMgAAAAAAq4JAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACACAJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACAiM8tc+e+7+vw8FCS1Ol05HmeHj16JMMwRq7juq6q1aoymYxM01Sj0dC9e/eUzWYXlGoAAAAAwHW21BJl27ZlWZby+bzK5bJSqZRyudzYdXzfl+u6KhQKKhQK2traIkgGAAAAAMzNUgNlz/PkOE74e2trS81mc+J6T548Ua/XU6fTUT6fTzKJAAAAAIAbZqlVrxuNRuz30dGRLMtaUmoAAAAAAFhyoBzlOI5831e9Xp+47OHhoVKplLrdrjqdjsrl8shlnz17pmfPnoW/T09PJUlnZ2c6Ozu7fMKHCLab1PZBHi8CeZw88jh55HGyFpm/HEMAwCKt9Xq93jITEHTo5fu+DMOYWJXa8zxJkmmakqRaraZGozEywP6zP/szvfXWWwPT/+Zv/kYvvPDCJVMPAAAW4Ze//KX+4A/+QE+fPtXt27eXnRwAwDV34UD59PRUzWZTOzs7Qx9UP/zhD/XlL395psTUajXZtq0nT56M7fk6yvd9bW5u6uTkZOg6w0qUX3nlFf385z9P7EF7dnamRqOhTCaj9fX1RPZx05HHySOPk0ceJ488TtYi8/f09FT/5J/8EwJlAMBCXKjq9d7enhzHUa/X09rammzb1n/6T/8pnP/06VNlMhl9+umnE7fl+75KpZL29/fDANeyrLBX61E9WTuOE5sXrOt5ntLp9MDyt27d0q1btwamr6+vJ/5QX8Q+bjryOHnkcfLI4+SRx8la1DMVAIBFmbrX62984xtqtVp6/PixTk5O9IMf/EDNZlN/+qd/Gltu2gJqz/NUqVTU7XbDab7vS9LI
0mTf95XL5cLq19F1gqrYAAAAAABcxtSB8ne+8x3VajXdv39fGxsbsixLjx8/1v/+3/9b+/v74XJra2tTbS+dTqtYLMYC3IODA6XT6bDn6yCYDhiGMbBOrVZTNpuduqo2AAAAAADjTF31+vj4WDs7OwPTDw8Ptbu7q7/6q78aWV16lP39/Vgg7Pu+3n///fC367qqVqsqFosj1zk+Pp6qp2wAAAAAAKYxdaBsWZYODw/1ta99bWBeECyfnJxcaOdBCfEo+Xx+oBfsSesAAAAAAHAZU1e9fvTokR4/fqzf/d3f1UcffTQw//DwUP/9v//3eaYNAAAAAICFm7pEeWNjQ4eHh3ry5Im++MUvDl2mXq/ryZMn80obAAAAAAALN3WJcuDu3buXmg8AAAAAwCq7cKAMAAAAAMB1RqAMAAAAAEAEgTIAAAAAABEEygAAAAAARBAoAwAAAAAQQaAMAAAAAEDEzIHyBx98oG984xv63d/93XDan//5n+uDDz6YR7oAAAAAAFiKmQLlR48e6f79+9ra2lKz2Qyn3717V7Ztzy1xAAAAAAAs2kyBcqVSUavV0sOHD2PTv/KVr8QCZwAAAAAArpqZAuXj42PduXNnYPqTJ0/U6/UunSgAAAAAAJZlpkA5l8spl8vp9PQ0nHZ6eqpCoaB8Pj+3xAEAAAAAsGgzBcrValUvvfSSDMPQycmJ7t27p83NTW1tbentt9+edxoBAAAAAFiYz826Yr1e15MnT9RutyVJ6XRad+/enVvCAAAAAABYhpkC5eeee067u7va29vTV77ylXmnCQAAAACApZmp6nWz2ZRhGPqjP/ojPf/889rb29MPf/jDeacNAAAAAICFmylQTqfT+ta3vqVut6ujoyN98YtfVD6f1/PPP69/9+/+3bzTCAAAAADAwswUKEel02mVy2VVq1Xdv39f1Wp1HukCAAAAAGApLhUof/e739Xe3p6ef/557e7uant7W81mc15pAwAAAABg4WbqzGt3d1ff+c53tLGxod3dXTWbTf3Gb/zGvNMGAAAAAMDCzRQop1IpPX78WPfv3593egAAAAAAWKqZAuVvfetb804HAAAAAAArYapA+d/+23+rXC6nL3/5y5Kk/f39scuXSqXLpwwAAAAAgCWYKlA+OjpSJpMJf7darZHLrq2tXT5VAAAAAAAsyVSBcn9P1o8fP04kMQAAAAAALNtMw0Odnp4Onf7RRx/po48+ukx6AAAAAABYqpkC5c3NzaHTO52OCoXCpRIEAAAAAMAyzRQo93q9odN3dnYGqmkDAAAAAHCVXGh4qH/2z/6Z1tbWtLa2pldffXVgvud5SqfTc0scAAAAAACLdqFAuVqtqtfr6bXXXtPbb789MN80Tf3Gb/zG3BIHAAAAAMCiXShQvn//viQpm83qK1/5SiIJAgAAAABgmWZqo1woFPTd7353YPr+/r4++OCDy6YJAAAAAIClmSlQ/sY3viHDMAam7+zsyLbty6YJAAAAAIClmSlQbrVa2tnZGZhuWZZc1710ogAAAAAAWJaZAmXTNPXkyZOB6d1uV3fv3r10ogAAAAAAWJaZAuV8Pq+vfe1r+j//5/+E0z766CPt7u4ql8vNLXEAAAAAACzahXq9DhSLRXU6Hd29e1ebm5uSJN/39fDhQ5VKpbkmEAAAAACARZopUJbOx1SuVCphm+R0Ok21awAAAADAlTdzoPzBBx/o4OBA7XZbP/jBDyRJf/7nfy7LsvQv/+W/nFf6AAAAAABYqJnaKD969Ej379+XaZpqNpvh9Lt37zI8FAAAAADgSpspUK5UKmq1Wnr48KF6vV44/Stf+UoscAYAAAAA4KqZKVA+Pj7WnTt3JElra2vh9CdPnsQCZwAAAAAArpqZAuVcLqdcLqfT09Nw2unpqQqFgvL5/NwSBwAAAADAos0UKFerVb300ksyDEMnJye6d++eNjc3tbW1pbfffnveaQQAAAAAYGFm7vW6Xq/L8zz9j//xPyQxPBQAAAAA4HqYOVCWJNM0ZZrmvNICAAAAAMDSTVX1+vnnn9e3v/3tX6303HN6/vnnJ/67d++ePvjgg6TSDgAAAADA3E1Vovz2229rZ2cn/N1oNKba+OHhoXK5nD788MPZUgcAAAAAwIJNFSj/h//wH2K/79+/P9XGd3Z2tLm5efFUAQAAAACwJDO3Uf7oo49UrVbleZ4k6V/9q3+lhw8f6vbt2+EyzWZT2Wz28qkEAAAAAGBBZhoe6jvf+Y5M01S9Xtfm5qY2Nzf1l3/5l9rc3NT//J//M1zu/v37Ojw8nFtiAQAAAABI2kwlyrZtq1gsDoyZXCgU9LWvfU1HR0dzSRwAAAAAAIs2U6Dc7Xb1p3/6pwPTy+WyUqnU1NvxfT8sce50OvI8T48ePZJhGGPXq1Qq4TK+76tYLE69TwAAAAAAxpmp6vXu7q6ePHkyMP2jjz66UJtk27ZlWZby+XwYZOdyubHrVCoVSVI+n1c+n1c6nVahULjYHwAAAAAAwAhTlSjv7+8PTPvyl7+sfD4fm1ar1bS7uzv1zj3Pk+M4YYnw1tbWxDbNpVIpFqRblqVMJqNqtTr1fgEAAAAAGGWt1+v1Ji302muvTb3Bra0t/eVf/uVMiQlKk+v1+tD5nudpa2tL/UleW1tTo9GQZVkT93F6eqqNjQ09ffo01kP3PJ2dnem9997T66+/rvX19UT2cRFf/epX9fTp02UnY656vZ4+/vhjvfjii1pbW1t2cq4l8jh55HHyyONkLTJ/P/vsM3344Yd69dVX9dxzwyvEbWxs6J133kk0HQCAm2GqEuXHjx8nnQ45jiPf90cGyZLCoaj6GYYh3/eHznv27JmePXsW/j49PZV0HsyenZ3NnuAxgu0mtf2L8n1f3//+95edDAAAEvXgwYNlJwEAcE3MPI7yvAQdevm+r1wuN7Ejr2FSqZS63e7QeaVSSW+99dbA9MePH+uFF1648L4uotFoJLr9aX388cfLTgIAAAAAXBkXCpRPT09VKpXkuq7a7bYkyTRNZTIZvf322zNVZTYMI2zrXKvVtLm5qSdPnlwoYB4VJEvn7au//vWvx/6GV155Ra+99lqiVa8bjYYymcxKVL3+9re/vewkAAAAAMCVMXWg/MMf/lDZbFapVErZbFb5fF6+76vT6ei//Jf/omq1Ktd19Tu/8ztTbc/3fZVKJe3v74dBsWVZ8n1frusO7T3bNM2R2xo179atW7p169bA9PX19cSD2EXsYxoXbTcWHIN0Oj0yXwEAAADgupoqUH7y5Imy2azK5bIePnw4MP9b3/pWONST53n6p//0n07cpud5qlQqKhQKsTGRJY0sTTZNU4ZhyPO8gQBumo68MJnruvJ9X5Zlqdlsqlwu06M4AAAAgBtlqnGUv/GNbyifzw8NkgPlcllf+9rXwqGeJkmn0yoWi7GA9+DgQOl0Ogx6g2A6an9/X67rhr8dxxkYpgqz8X1fjUZD2WxWhmGEQ2/Ztr3spAEAAADAwkxVouy6rlqt1sTlbNvWvXv3pt75/v5+LBD2fV/vv/9+bL/
VajUWfBeLRVUqFTmOI0k6OjqixHNOXNeNfYSQzkvqHz58qHK5vKRUAQAAAMBiTTWOciqVUrvd1he/+MWxyz158kQ7Ozs6Pj6eV/rm7iaOo/zgwQO9++67M6+/tramk5OTmXokBwBgUS77vAMAIDBVibJlWfrOd76jf//v//3Y5Wq1mu7fvz+XhGE5arWaUqmUpPPexINq8N1uV81mU77vq9vthtXdfd/X/fv3p6pxAAAAAABXwVSB8ttvv62dnR2Zpql/82/+zdBl/vN//s+qVCrqdDpzTSAWJ5PJqFwuK51Oh9MKhUL4/6lUKhwOLAiUXdcNA2sAAAAAuA6mCpRN09Th4aFee+01bW9vy7Is3bt3T91uV51OR47jyPM8HR4eTqyejdVk27bS6XQsSJakZrMZ9mZuWZYqlUpsmWC8aAAAAAC4LqYeR9myLHW7Xdm2rXq9HnbuZJpmOJTQxsZGYglFsiqVytDq057naX9/P6yCfXBwEOvYq9lsKpfLLSydAAAAAJC0qQNl6Xx8Y3qYvn6Cnq77S5Pb7bZ831c2m5V03h653W7Hxqzu/w0AAAAAV92FAmVcX9HxrAOlUin2YcTzvNhy7XY7/O267pUImH3fV61Wk6SRY34HQ491u92wxgRw1U1z7l8lXMsAACBJBMoIq9VHBS+YQadd0nmNgugQUQcHB2EptOd5ySd0DlzX1fHxse7cuTN0vud5ajQa4QeCTCbDyzWuhUnn/lXDtQwAAJL03LITgNXQarVk27YqlYoqlYq63a7q9XpsGdM0tbOzo0qlIsdxtLe3J+l8SKloQL3Kstmstra2Rs53XTf2McAwjLBq+jC+788xdVfPTf/7r5JJ5/5VM89r+aqfx1c9/QAArCJKlCHpPAiOdtI1Sn8b9f5g+qrrdDqxEqpUKjXyJdS27any7Dqr1WrKZrNDq+4D0wiur2hQOw/TXsvX4TrmOgQAYP4IlIEJ+qulS+cvptExpqXzF/7Dw0PV63U1Go2ptl2pVMIAwff9gbaW08yXFI5fvujO9orFogqFwsp38hfkkyQdHx9PFRhNWmeWbY4z6fyZdC5cVQ8fPtTe3l7YaWCS+q/lRV3HSVvkdbiM+1ytVpPv+zIMQ51OR/v7+3P/sAIAQD8CZSBia2srVuoUdAIU5XmeWq1WrLp5u91Ws9mU7/tDA+thgkAr2I7rurGX3Unz+0vCCoWCMpnM1C+v85LL5VSpVFY2cMvlcspkMmE+1mq1iaWIk9aZZZvjTDp/pjlXjo+PB9a7c+fOyh6XQFK1UiZdy4u6jhdl1uuwv5PGcZZxn6tUKsrn87FA+uHDh9euNhMAYAX1bpinT5/2JPWePn2a2D4++eST3ve+973eJ598ktg+LuKNN95YdhJWSrVa7ZXL5di0k5OTXq/X63U6nV42mw2np9PpgfWLxWKv0+kM3Xa9Xh+6zjCGYYT7DUQvyXHzT05OepZlxea3Wq2epJFpS9K0f/OidTqdnqRYPp2cnAxMu8g6s2xzWqPOn0nnyrSGnftX2WWu5UVdx4s0y3WYz+cvvM6i7nO9Xq9nWdbANodNC/C8AwDMCyXKWFmVSkWmacowjNh4zsG8aNW7aKlQMGxMsK6ksLdb13XVaDTk+75M0wy3ub29rVarJdM0tbe3J8dx1O12tb+/P5Au13UvXc3W87ywKuGw7ZumOXb+zs6Oms2mPM8Lex4PSoWipWhB7+WNRkO2bct1XXU6HRUKhbm2ZzRNU+12e2As7mULemPv79RJkprN5tBekCet0z9tmm1exqRzZdr9jTr3g3lBKWFwLfm+r/v376vVas2c9knXYv8+pfPS+eAcDf72o6MjbW1txZa77LW8iOs4+Ftv6nV42fucZVkyDEOZTEb1el2GYVyoBBwAgMsgUMbKCV7Q6/W6TNOU67oqlUrhi3Amk1G1Wg1flra3t7WzsxO+HNq2Ldu2ZZqmPM+TbdvhC6tlWUMDi6CNr6SxbSU9z1Mqlbr03zhqOK3go8Ck+YZh6OTkJDYv6NE3yJdarabd3V0ZhhG+oNfrdWUymdgY2POQyWTkuu7KvKAHoh8P+l/GR+XxpHWC8+ci27yMSefCtEad+8E5bZpmrDq567qXPtdHXYuj9tlut2VZlnzfVy6XC+8B2WxWm5ubsUD5Mtfyoq5j6WZfh5e9z0nSo0ePtL29rc3NTRWLRW1tba18nwgAgOuBQBkrx7Zt7e3thS+QOzs7YXs027aVTqdjL5c7Ozuxl8Nmsxm2FzVNc65t2YLSq6SkUil1u92RHdUE84cplUqqVqvhuqlUKvx/z/PCTouibZiDkq6g/eawwGOaZVKpVCxAGaa/06RRtre35zbcWJBe13XDoGnccF/TrDPLNpMw7ly4iCD4r1QqsQCr0Wgok8lcatujrsVgn7VaLbbPbrerdDqtTqcjy7KG1pKYh0Vdx8H/r9J1uAoucp8zDEO2bavRaKhSqciyrPDDAwAASSJQvqHW1tYWsp9erzcwzXEc2bY99IUuqKoZnWcYRvhSVKlUBqqCep4XG0+1XC4rl8tpa2tL6XT6UlVH+3mel+gL2qTAZ9T84ONCNMCMlqY1m82BDwae56nRaISlM5lMZuDle5plpPPg8eDgYGza51EK5DjOxP1I0v7+fhiABaV43W43LMUM0jzKpHUuus1x5/ys5hEkS7+qCn1wcBCritxsNpXL5UauN83fNOpaDPZZrVZj+4xWy46er0Fzg3lZ5HW8StfhsI9VQedZUZlMJtFeyC9yn7NtO6x67Xmecrmctre3r8QHAQDA1UagfEMNC2AXxTTNkVUDm82mDMMYGnC0220ZhjGwbn9bQ8uydHJyona7rVwuJ8dxFjL0zEWMCqiCkq5J86Mcxxlovxk1rM2tdJ5v/e1s+9u8TrOMpDBgTFo2m53pWEbPj6B0clLgNWmdi2xz3Dk/yUXOhVn5vh9Wew70/x6Wrkl/07hr0fO8WFX2aFqibe+l8yB+XNC+LBc5NqtwHQ77WJVkD92Xvc8FbZyDv9M0TbVaLW1vb6/kfR0AcL08t+wE4OZJp9Njq0MPe3kKOv3pfwl0HEfpdDp8qd7a2gpfSNPptAqFwlxLjoKXt3lsJ+iYpl9Q5XTc/EBQ5TfaAVP/OsNeuqXztpx37twJp6dSqYEqrtMsE+w3Wqo/TKFQmOpfrVYbu52Larfbsd9Blelx58WkdS66zUnn/DjTnguX0d9BUrTt7Khq5ZP+pknXouM42t3dHdjHsA7RXNfV7u6u2u32XKphL+o6jlqV63CRLnufG1XyP20zDgAALoNAGZLOX7Acx0mkM6KLCF6OojzPC18y+6vsVavV8GU96Bk7+sJ/dHQ0116IJ71gj6pS6HleOF5oYH9/PxYgOI4TKxWeNL/dboc93Aalc7VaTalUSo7jhO1LG41G+IHB9/2xgcY01XmHLTNNVdZqtTrVv3m1Tw7kcrlYPvZX9x12bCatM2n+rEbl/6Rz4bKizRuk8xLc4OPTLPeEaa7Fo6MjZTIZ+b4f+wDWaDRipclBx1uGYYQ1Ti5rUdfxKl
6H87Co+5xlWUM/jrRaLUqTAQCJo+o1wmFaLMtSs9lUuVxeaq+i9Xo97Fiof+iXYF7wohsMGSKdv+zfu3cv1unNo0eP5po2wzCGVm30PC9sP9tut2Xbtu7duxfr7KlarapYLIbrFItFVSqVML1HR0exfB83P+gZ3Pd92bYdS0uxWAx7E3YcR48ePZJt2+F2gjRtbW3FXkCDToKiplkmSNu883peqtWq2u22PM9Tp9OJ9ZguDT82k9aZNP+iJp0/k86VyzJNUzs7O+G1tbe3p1KppFqtNlNAPs21uLe3p263O1CC7Pt+rMQw2nnavD56Leo6vm7X4aLvc9L5Pb9UKunOnTthb9jz+CgFAMBEyx7IedGePn3ak9R7+vRpYvv45JNPet/73vd6n3zySWL7uIg33nhj5LyTk5NesViMTavX6wPT8CvlcrnXaDSWnYxL63Q6vWw2G/5Op9Ph/5+cnExcJiq6DHAVrMp1vOzrMJ/PX3idVTbueQcAwEWs9XpL7NVpCU5PT7WxsaGnT5/q9u3biezj7OxM7733nl5//XWtr68nso+LePDggd59992h8xzHUalUivUM7fu+7t69OzBOL34lGOP1qouW+KVSqVgpV6vVkmEYI5cJBKX/86ziDizCqlzHy7wO+9unX3XjnncAAFwEVa9vuGG9CAfV23zfZ6zKEfb29q5Fr6uj0h8demXc3+j7vo6PjwmScSWtynW8zOvwOgXJAADME515QbVaTY7jyHEc1Wq1sJOboMOWYF6hUAg7jLJte+kdfy1TdIibm6xWq9FeEFfWdbmOuQ4BAJg/SpRvuEwmo3K5HOtlNuhIxzRN1Wo17e7uyjAMNRoN2bater2uTCYTG0LmJlp2KdQqiHbaA1xF1+E65joEAGD+CJRvMNu2Y2MQB6K90AbDskjnpS5BEN1oNMLlo23ngh5qAQAAAOCqIlC+wSqVSqwTr4Dnedrf35cUL21pNpsDHd94nqdGoxEO55HJZAiUAQAAAFxptFG+oVzXlaSB0uR2uy3f9weqIwZt+Po793JdNzbNMIxw2wAAAABwFREo32DD2heXSqWwdDjKdd1YSXEQDHc6Hd25cyecnkql5Pv+/BMLAAAAAAtCoHxDWZYV9modCNoa5/P58Hcmk5F03iY5lUpJUjh01Cj92wUAAACAq4Q2yjdYq9WSbdthibBhGLE2yKZpKpPJyHEcPXr0SLZth8F0UDV7a2srFjQHHXoBAAAAwFVFoHyDmaY5duzN/h6xh1XJtixLtm2Hvz3PozMvAAAAAFcagTIuxTRN7e3tyXEcdbvdsLdsAAAAALiqCJRxaf09ZAMAAADAVUZnXgAAAAAARBAoAwAAAAAQQaAMAAAAAEAEgTIAAAAAABF05nUDbGxs6MGDB8tOxlz1ej19/PHHevHFF7W2trbs5FxL5HHyyOPkkcfJWmT+fvbZZ/rwww/16quv6rnnhn/n39jYSDQNAICbg0D5BnjnnXeWnYS5Ozs703vvvafXX39d6+vry07OtUQeJ488Th55nKxF5u/p6ak2NjZ0dHSk27dvJ7ovAACoeg0AAAAAQASBMgAAAAAAEQTKAAAAAABEECgDAAAAABBBoAwAAAAAQASBMgAAAAAAEQTKAAAAAABEECgDAAAAABBBoAwAAAAAQASBMgAAAAAAEQTKAAAAAABEECgDAAAAABBBoAwAAAAAQASBMgAAAAAAEZ9bdgIqlYokqdPpSJKq1erY5V3XVbVaVSaTkWmaajQaunfvnrLZbOJpncmnn0o/+pH0s59JX/iC9KUvSc8/v+xUAQAAAABGWGqgbNu2yuVy+LtQKCiTyajRaIxcx/d9ua4rx3FkmqZs217dIPn735f++I+lf/iHX017+WXpm9+U3nxzeekCAAAAAIy0tEDZ93212235vi/DMCSdB8rb29vyPE+maY5c98mTJ+E6K+2rX5V++cv4tJ/8RMpmJcchWAYAAACAFbTUEuVmsynP85ROpyUpDI59319iqubg00/P/9vrDc7r9aS1NelP/kT6/d+nGjaAhfjqV7+qp0+fLjsZC9fr9fTxxx/r29/+ttbW1padnGtnkfn72Wef6Z//83+uP/iDP9Bzz9HFCq62jY0NvfPOO8tOBoAxlhYoG4ahk5OT2DTXdSVpbGmyJB0eHiqVSqnb7arT6cSqb/d79uyZnj17Fv4+PT2VJJ2dnens7GzW5I919t/+2/l/P//50Qv9/OfSf/2v0m/+ZiJpuO6CY5fUMQR5vAiLzGPf9/X9738/8f0AACZ78ODBspMAYIK1Xm9YsedybG9vq1AoKJ/Pj1zG8zxJvwqma7WaGo2G6vX60OX/7M/+TG+99dbA9L/5m7/RCy+8MIdUA8Dq++Y3v6kf/vCHy04GAEDngfK777677GQAGGNlAmXbtnXnzh0Vi8ULref7vjY3N3VycjK03fKwEuVXXnlFP//5z3X79u3LJnuosx/9SI1f/EKZP/xDrf/jP45e8O/+jhLlGZ2dnanRaCiTyWh9fX3ZybmWyOPkLTKP33zzTUqUAWBFECgDq2/pw0NJkuM42traGluSHF022st1EBxH2zpH3bp1S7du3RqYvr6+ntyL6b/+19IPfqD1//t/hwfKa2vnvV//1m/RRvmSEj2OkEQeL8Ii8vii7UeDEQbS6fTE5jAAAADXzdJ7wwjaJQdBsu/7YfXqfr7vK5fLxeYHHX+t1ItcNPjtfzkNfv/FXxAkA1hJruvKdV1ZliXP81QoFJadJAAAgIVaaqDcbrfVbreVTqfleZ48z1OtVlMqlZJ0XkpcqVTC5Q3DULFYjAXFtVpN2Wx2NYeLeucd6dd/PT7t5ZcZGgrAyvJ9X41GI7yvWpalTCYj27aXnTQAAICFWeo4yvfv35fv+wMvYEE7Zdd1Va1WY+2W9/f3Y8Hz8fHxyI68lu6NN86HgPrRj6Sf/Uz6whekL32JkmQAKysoTY6yLEsPHz4cO8IAAADAdbJSw0P1y+fzA+2Wg1LlK+P556Xf/u1lpwIAppLNZmP9QEjn913f9+X7/mrW3gEAAJizlejMCwCwOqJNYLrdrizLCv+/2WzK9311u91Y3xL3799Xq9VaWpoBAADmiUAZABDKZDIql8uxUQSinXmlUimZpqlMJhMGyq7rhoE1AADAdbD0Xq8BAKvBtm2l0+mBofaazWbYA3Y6nZbjOLFlgrGgAQAArgsCZQCAJKlSqWhvb29gejBEVFAF++DgIFbK3Gw2h45jDwAAcFURKAMAwp6u+wPedrst3/fDDr5831e73Q6D5mCZ6G8AAICrjjbKAABJio1RHyiVSqpWq+Fvz/Niy7Xb7fC367pXJmD2fV+1Wk2SRo6k4DiOpPNOzEzTvDJ/G24Gx3HU7XbVarWUy+U4P1fENPcWAFcDgTIAQJZlqdvtxqYFgWJ0mD7DMGJDRB0cHISl0J7nJZ/QOXFdV8fHx7pz587Q+Z7nqdFohB8JMpkMgQhWRrvdlnR+bfq+r7t3704cchOLMeneAuDqoOo1AECS1Gq1ZNu2KpWKKpWKut2u6vV6b
BnTNLWzs6NKpSLHccI2zbVabWDc+1WWzWa1tbU1cr7rurEPAoZhhNXT+/m+P+fUrZbr/vddRd1uV41GQ9L5uZlKpcLgGcs16d4C4OqgRBkAIOk8CC6XyxOXi1bFljQQTF8HnU4nViKUSqWGBoy2bU+VZ1dZrVZTNpsdWjUfy2FZVqyGQ7fbpUM9AJgzAmUAAKbQXzW9VqvFev+WzktfDw8PVa/XwxK/SSqVSlh67ft+rF2j67qqVqvKZDIyTVONRkP37t0LO1dbhGKxqEKhMPCBJEmVSkXS+QcLafDjTL9p8mnSNi+6z0kmnQvjjvtFFAoFPXr06DJJnSvbtsMS1VQqNdW5OmqdXC6nvb09maYZq+EhDe9TYRqLOi4Arj4CZQAA+mxtbcVKkIMOvQKe56nVasWqm7fbbTWbTfm+PxBUjxIEZ8F2XNeNBaW+78t1XTmOI9M0Zdv2QoPkQC6XU6VSWUjQ0F9KXygUlMlkxn54mJRPk7Y5yz7HmXQuTDrulUpFx8fHA+vduXMndgwcx1Emk0nsnOjvvG8c3/d1//59vf/++zIMQ+12W9vb2+r1ejOv0263w74SorLZ7Ew1WRZ1XABcE70b5unTpz1JvadPnya2j08++aT3ve99r/fJJ58kto+bjjxOHnmcvEXm8RtvvJH4Pq6aarXaK5fLsWknJye9Xq/X63Q6vWw2G05Pp9Ox5YrFYq/T6Qzdbr1eH1h+FMMwwn0Goo/mer0+MH9Zpv2bLuPk5KRnWVbsb261Wj1JI/O71xufT5O2Oes+pzHqXJh03KfRaDR6jUaj1+udp/eyaR0mn89faNn+6ylI36zr9M/r9c6v28tK8rj0esPvLf24JwOrjxJlAMBKq1QqYdXL6JjOwbxolcxoCW8wTEu02mbQrtN1XTUaDfm+L9M0w21ub2+r1WrJNE3t7e2FQ/Ds7+/H0uS67qXbJnueJ9/3B6qUBtuftpftoMSt0WjItm25rqtOp6NCoTDXdsWmaardbifeFrbZbMrzvHA/wd9wmU7Fxm0zlUolss9R5nHcPc9TLpcLf/u+Hyu5dV03LDUNromg9LbVal3+jxiiVqup0+nI8zx5njfQjnqWdfpLyl3X1c7OTiLpn9f1OOreAuDqIVAGAKyk4MW+Xq/LNE25rqtSqRS+eGYyGVWr1TCo2d7e1s7OThjs2LYt27ZlmqY8z5Nt2+HL7qiX+KB9qjT4kh7wPE+pVOrSf9+o4bSCDwKBw8NDpVIpdbtddTqdWIBeq9W0u7srwzDCQLleryuTycTGuJ6HTCYj13UTDZQNwxgY5ijobXzS3zIqnyZt8zL7nMW0x30c0zRHDgcVnJ+maSqTycSqEc/jvB21T+lX46qbpqlCoTB2fOdp1ulv7hAE00n+Df0uclyk0fcWAFcPgTIAYCXZth125CNJOzs7YbtE27aVTqdjL9I7OzuxQK7ZbIZtT03TnFvv3EFJUVKCYE/SQAlnrVZTLpcL/5ZUKhWWgHmeF3YuFm1bG5Q4B+2sh73ET7NMKpWKfUgYpr9zs1G2t7enHk6sVCqpWq0OLekLTMqni25zmn3OW/S4X0YQTFYqldhHjUajoUwmc+ntj9qndB5UBvssl8tjx3e+6DrlcnmhHcoF5nVcAFw9BMoAcEOtra0tZD+9IZ35OI4j27ZHBl5BtenofMMwwsClUqkMVCH1PC82fmm5XFYul9PW1pbS6fTcqpx6npdoABV9Ke8PyHd3d1UoFMIqotFS72azORAYep6nRqMRBhiZTGYgCJ5mmSAtBwcHY9M+70Am+FgyKaielE8X2eY0+5x0/s5iXsFYcOwODg5itQ+azWasuna/YR85gs6sosZ1HhatFh2UxE6qtjzNOtOOEb3KxwXA1UOgDAA31LAAdlFM0xxbhbfZbMowjKElt+12O1YKFehvN2xZlk5OTtRut5XL5eQ4zkq1FxxVKh0tse5Pc7T0OPr3R0vnolzXjU0zDGMgCJlmGek8YEiq6u4wjuNoa2trqpLnafNp0jan3eek83fSusPMs6aC7/tqt9sDwea4gHXYR45phwUblW7DMEZWab7IOtVqNfYRbFw6Vvm4ALhaCJQBAAuXTqcnVoUe9nIadFDUH7A5jqN0Oh2+JG9tbanRaIQvzoVCYW6lwNOUrE67nSAo6P9bLcuS7/vK5XLqdDoDnUv1Lz8s+LUsS51OR3fu3Amnp1KpgfaW0ywT7HtSsDKvqtdBG+FoR1T9Q3RF0zVNPk3a5kX2Oc35O8qk4z4P/duOtle/SMdU0wraGPd/mPB9f2TnWxdZx3VdbW9vT0zHqh8XAFfLc8tOAABgNfi+L8dxRpYALZJlWQPp8DwvfMnvrw5ZrVbDF+SgZ+zoy+7R0dHcXnaDl/tRRlXV9DwvHKc1sL+/HwZo0nnAHwRqhmGoWCzG/o5araZsNivDMMIxdKXz9qfBxwPf98d2PjRNVdJhy0xT5bxarU71b1yQ3G63w961gw6carVa+Pf15+OkfJpmm5Pmz2pUXo877vMQbaYgnVfDDoLRpK7vcrkc+4DkOI4sy4rtt//8n7RO4CLjOU9jWccFwNVCiTIAIBxOxrIsNZvNpXWcE1Wv18MOifqHWgnmBUFrvV4PAwPDMHTv3r1YB1WPHj2aW7oMwxgaQHmeJ8dxdHBwoHa7Ldu2de/evTDNruuqWq2qWCyG6xSLRVUqlTCtR0dHsXzf39+PBRfHx8fhB4GgV2PHcfTo0SPZth1uJ9jn1tZWLGgeVkI6zTJB2uaZj8MEPZ37vi/btmPzgnwblo/j8mnSNqfZ50VNOhcmHffLMk1TOzs74TWyt7enUqmkWq2WWOCXzWbV7XbD43B8fBzrVG7YcZu0TvTvmVdP88s8LgCulrXeMhupLcHp6ak2Njb09OlT3b59O5F9nJ2d6b333tPrr7+u9fX1RPZx05HHySOPk7fIPH7w4IHefffdofN831epVIq173UcR0dHR5ceK/i6CgL4Va+SGQyLFQSNwTjRksKOrsYtEzWuF2lcT9O2UcbFjbsnA1gNVL0GgBvOdd1YdUPpvOpzrVZbUopWX7FYvBIBRFCa6DiOarWa9vf3w3nb29thSf2oZQKVSmXq9se4PvpL2AHgJqHqNQDccNlsdqA36GCYlmHD6+BcEFyuUk/aw4xKX3QInXF/g+/7Oj4+XvnSc8wfvT0DuMkIlAEAsY6Lut1uGBR1u92w4yjpvNMo27bluq46nY4KhcKNfZnOZrNh52fXOQ9qtRpV8AEANw6BMgDccJlMRuVyOdbTbFDN1jRN1Wo17e7uyjCMMFCu1+vKZDKxYWduolUvTZ6HWTu0AgDgKiNQBoAbzLbt2PjDgWazGZYqp1KpsPq153lhEB3tnTbaw7RpmlTTBQAAVxqBMgDcYJVKZWgPx57nhZ06RUtNm83mQM/Hnuep0WiEnVtlMhkCZQAAcKXR6zUA3FBBT9f9pcntdlu+7w9UK/Y8T5IGOvdyXTc2zTCMgV60AQAArhICZQC4wYa1Ly6VSkOHPnJdN1ZSHATDnU5Hd+7cCaenUin5vj//
xAIAACwIgTIA3FCWZanb7camBW2N8/l8+DuTyUg6b5Mc9IwdDB01Sv92AQAArhLaKAPADdZqtWTbdlgibBhGrA2yaZrKZDJyHEePHj2SbdthMB1Uzd7a2ooFzUGHXgAAAFcVgTIA3GCmaY4dI7e/R+xhVbIty5Jt2+Fvz/PozAsAAFxpBMoAgEsxTVN7e3tyHEfdbjfsLRsAAOCqIlAGAFxafw/ZAAAAVxmdeQEAAAAAEEGgDAAAAABABIEyAAAAAAARBMoAAAAAAETQmRcA3AAbGxt68ODBspOxcL1eTx9//LFefPFFra2tLTs5184i8/ezzz7Thx9+qFdffVXPPcd3flxtGxsby04CgAkIlAHgBnjnnXeWnYSlODs703vvvafXX39d6+vry07OtbPI/D09PdXGxoaOjo50+/btRPcFAACfZAEAAAAAiCBQBgAAAAAggkAZAAAAAIAIAmUAAAAAACIIlAEAAAAAiCBQBgAAAAAggkAZAAAAAIAIAmUAAAAAACIIlAEAAAAAiCBQBgAAAAAggkAZAAAAAIAIAmUAAAAAACIIlAEAAAAAiCBQBgAAAAAg4nPLTkClUpEkdTodSVK1Wp1qHcMwJEm+76tYLCaWPgC4tE8/lX70I+lnP5O+8AXpS1+Snn9+2akCAADACEsNlG3bVrlcDn8XCgVlMhk1Go2R6wSBdT6flyS5rqtCoTBVgA0AC/fd70p//MfSP/zDr6a9/LL0zW9Kb7yxvHQBAABgpKVVvfZ9X+12W77vh9MKhYJc15XneSPXK5VKYZAsSZZlqVarJZlUAJjNd78rZbPxIFmSfvKT8+nf//5y0gUAAICxltpGudlsxoJi0zQlKRY8R3meJ9/3w2rXUa7rJpFEAJjNp5+elyT3eoPzgmnf+MZi0wQAAICpLK3qtWEYOjk5iU0Lgt0gYO43qqTZMIyRwfWzZ8/07Nmz8Pfp6akk6ezsTGdnZxdN9lSC7Sa1fZDHi0AeX9KPfywdH0uf//zIRc6Oj8//Sx4nhvM4WYvMX44hAGCRlt6ZV1SpVFK1Wh1aYjxOKpVSt9sduc233nprYPrjx4/1wgsvzJLMqY1ra435II+TRx5fwt/+7VSLkcfJI4+TtYj8/eUvf5n4PgAACKxMoGzbtvb29mLtj6c1KkiWpP39fX39618Pf5+enuqVV17Ra6+9ptu3b8+U1knOzs7UaDSUyWS0vr6eyD5uOvI4eeTxJf34x9Lv/d7YRc4+/3k1/vqvyeMEcR4na5H5G9QIAwBgEVYiUHYcR1tbWxOD5FFVsn3fHznv1q1bunXr1sD09fX1xB/qi9jHTUceJ488ntFv/ZZ05855x13D2imvrZ33fi3yeBHI42Qt6pkKAMCiLLUzL+lX7ZKDINn3/ZFtkU3TlGEYQ+dblpVcIgHgop5//nwIKOk8KI4Kfr/99mLTBAAAgKksNVBut9tqt9tKp9PyPE+e56lWqymVSkk677wrGDc5sL+/H+vh2nGcmaprA0Di3nxTchzp1389Pv3ll8+nM44yAADASlpa1Wvf93X//n35vi/btmPzisWipPPS5mq1Gv4O5lUqFTmOI0k6OjpStVpdXMIB4CLefFP6/d+XfvQj6Wc/k77wBelLXzovcaYXXwAAgJW0UsND9cvn80NLi6OBczabnXvaAGCunn9e+u3fXnYqAAAAMKWlt1EGAAAAAGCVECgDAAAAABBBoAwAAAAAQASBMgAAAAAAEQTKAAAAAABEECgDAAAAABBBoAwAAAAAQASBMgAAAAAAEQTKAAAAAABEECgDAAAAABBBoAwAAAAAQASBMgAAAAAAEQTKAAAAAABEECgDAAAAABBBoAwAAAAAQASBMgAAAAAAEQTKAAAAAABEECgDAAAAABDxuWUnYNF6vZ4k6fT0NLF9nJ2d6Ze//KVOT0+1vr6e2H5uMvI4eeRx8sjj5JHHyVpk/gbP7eA5DgBAkm5coPyLX/xCkvTKK68sOSUAAOCifvGLX2hjY2PZyQAAXHNrvRv2afazzz7TT3/6U7300ktaW1tLZB+np6d65ZVX9Pd///e6fft2Ivu46cjj5JHHySOPk0ceJ2uR+dvr9fSLX/xCv/Zrv6bnnqPlGAAgWTeuRPm5557Tyy+/vJB93b59mxezhJHHySOPk0ceJ488Ttai8peSZADAovBJFgAAAACACAJlAAAAAAAiCJQTcOvWLf3H//gfdevWrWUn5doij5NHHiePPE4eeZws8hcAcF3duM68AAAAAAAYhxJlAAAAAAAiCJQBAAAAAIggUAYAAAAAIIJAGQAAAACAiM8tOwHXTaVSkWEYkiTf91UsFpeboGuoUqlIkjqdjiSpWq0uMznXXiaTUaPRWHYyriXbtrW1tSVJSqVSymazS07R9VKr1eT7vgzDUKfT0f7+fnh/xsX4vq/Dw0PV6/Wh9wOefQCA64ZAeY6CAC6fz0uSXNdVoVAgkJsj27ZVLpfD34VCgUAuQY7jyHXdZSfj2vF9X/fv39f7778vwzDUbre1vb0tBiGYn0qlonw+HwveHj58qHq9vtyEXUHtdlvNZlO+76vb7Q7M59kHALiOGB5qjjY3N/XkyZNYicXa2hovv3Pi+75yuZzq9XqYx0GA0el0ZJrmchN4zQQlSIVCgXN4zgqFgra2tmKlbq7ryrKsJabqehn2AY2PapfjOI5KpZJarVZsOs8+AMB1RBvlOfE8L6zi148SuflpNpvyPC/8HQTHvu8vKUXX1+HhoXZ3d5edjGupVqspm83K87zw/kCQPF+GYSiTyYT3Bs/z+JiWAJ59AIDrikB5TqLBW5RhGARxc2IYhk5OTpROp8NpwYsYL8DzRelmcoJ7Rbvdlu/7Mk1ThUKBoGLOHj16JM/ztLm5Kdu25bouVYETwLMPAHBdESgnLJVKDW3ThfkolUqqVqt00DNnQQCH+QsCC8MwlE6nZZqmyuWycrncklN2vRiGIdu2lc1mValUVK/XCdwWiGcfAOCqI1BOGC8KybFtW3t7e2EHMpiPoFowkrWzsxP+f1D6Rqny/Ni2LdM0Va/X1el01O12tb29vexk3Rg8+wAAVx2B8pyMKn2jZC4ZjuMMdIaEy2u327EADvM36n5gGMbIaqy4mKDdbNB8wDRNtVotGYYhx3GWnLrrhWcfAOC6YnioOTFNM3zR7X85oK3nfAWlbkFJcjBkCS9ll9ftdtVut8M8DsaqrlQqMk2TkuY5ME1TpmnK87xYe3vf9/lIMSee5w1tjlEoFBafmGuOZx8A4LqiRHmO9vf3Y1UnHcehWvCctdtttdttpdNpeZ4nz/NUq9WUSqWWnbRrwbIsFYvF8F8QWBSLRYLkOSqXyzo4OAh/O44jy7JigTNmZ1lW2FlaVKvV4jy+hFHVqXn2AQCuI8ZRnrOg5E2Sjo6OVC6Xl5yi68P3fd29e3dohzycxvPnOI4ODg7kOI6KxaIymQwlRHNUq9XCc/n4+Jh7xZz5vq9SqaQ7d+6EbcDz+Twd/83A87zwftBut1UsFnXv3r3YRweefQCA64ZAGQAAAACACKp
eAwAAAAAQQaAMAAAAAEAEgTIAAAAAABEEygAAAAAARBAoAwAAAAAQQaAMAAAAAEAEgTKAS/E8T7lcTpubm9rc3FQul5PneQPLZTIZ2bY9cjuVSkVra2uJpXN7e1uFQiGx7QMAAOD6IFAGMDPXdbW9va179+6p1Wqp1WrJNE1tb2/Ldd0LbcuyLFWr1YRSKu3v7yuXyyW2/SjHcZTJZBayLwAAAMzf55adAABXk+/7ymQyqtfrymaz4fRyuaytrS3lcjk9efJEhmFMtb10Oq10On3pdLmuq0KhoE6nE5seTWNSbNtWrVZTKpVKfF8AAABIDiXKAGZi27bS6fTQADSfzyuVSqlUKi0hZctTLpd1cnIytoo5AAAAVh+BMoCZuK4ry7JGzs9mswPVr33fV6FQ0Obmpra2tuQ4Tmx7/W2Uo8vWarXYvEqloq2tLa2trYVVvXO5nDKZjDzP09ramtbW1uT7vqR4G+lCoTBQDbvdbsf2P27fAAAAuN4IlAHMxPM83bt3b+T8ra0ttdvt2LTDw0MVCgU9efJE2Wx2ZMdfksJ5T548UaPRkG3b4fYKhYIODg5Ur9d1cnKicrks3/dVr9dVr9dlmqZ6vZ56vd7Qqt+5XC4WpEtStVoNS8fH7RsAAADXH22UAcys2+2OnBeU5Ebl8/mwHXK5XJbjOKpWqyqXy7HlPM+T4zg6OTmRYRgyDEPlclkHBwcyTVO1Wk2dTkemaUrS2JLtYSzLkmEYchwnDI4PDw/16NGjsfueRxtqAAAArD4CZQAzMU1zoMOsqGggO4plWUNLlIPS27t378am7+zsyHVdGYYxcduT7O7u6uDgQNlsVu12W77vK5vNhiXNw/YNAACAm4Gq1wBmYlnWQPXlqMPDwwuX9Eal02mdnJzE/jUajZm3169QKITpDwLmRe0bAAAAq41AGcBMyuWyPM9TpVIZmGfbtnzfH6hS3c913aHtnNPpdFjKO2ye7/sj2zZPK51OyzAMua4rx3FUKBQm7hsAAAA3A4EygJkYhqF6vS7btmXbtjzPk+d5KhQKqlQqajQaAx1p1Wq1MAgtFAryPE/5fH5g26ZpKp/Pxzr7chxHlUplYJ7v+3IcJ+zR2jTNcLrrumMD6nw+Hwb8Qen3uH0DAADgZiBQBjCzbDarTqcjz/O0vb2t7e1tdbtddTqdgWrXpmlqd3dXpVJJm5ubajabarVaQ3ulls57oU6n09re3tbm5qaq1Wq4zeD/M5lMOG9vb0/SeYlwOp3W3bt3J5Zo7+3tyXXdgWB93L7HqdVqWltbCz8CrK2taWtra+J6AAAAWC1rvV6vt+xEAIDruspkMuKWBAAAgGWjRBkAAAAAgAgCZQBL5bqufN9XvV5nnGIAAACsBAJlAEtVrVa1ubkp13X16NGjZScHAAAAoI0yAAAAAABRlCgDAAAAABBBoAwAAAAAQASBMgAAAAAAEQTKAAAAAABEECgDAAAAABBBoAwAAAAAQASBMgAAAAAAEQTKAAAAAABEECgDAAAAABDx/wMFBAxtUE2upQAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] @@ -818,7 +3475,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "id": "7d4ec062", "metadata": {}, "outputs": [], @@ -849,7 +3506,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "id": "e91321ff", "metadata": {}, "outputs": [], @@ -867,13 +3524,13 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "id": "5b50e0c4", "metadata": {}, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAGyCAYAAAAYveVYAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAACnBElEQVR4nOydd3xT573/31qWl2zJ2xgDttmEEMzIJgkrexXIbHrbJoEm7U170xZC219vc9uUQPcMIW2TjqQJkNHsgElCFkmwzd5eYBtvW5aHrHl+f5wcYfC2NY7k5/165dUiHT3P90iyzud8p0aSJAmBQCAQCASCCEEbagMEAoFAIBAI/IkQNwKBQCAQCCIKIW4EAoFAIBBEFELcCAQCgUAgiCiEuBEIBAKBQBBRCHEjEAgEAoEgohDiRiAQCAQCQUQhxI1AIBAIBIKIQh9qA4KN1+vl9OnTmEwmNBpNqM0RCAQCgUAwCCRJoq2tjTFjxqDV9u+bGXXi5vTp02RnZ4faDIFAIBAIBMOgsrKSsWPH9nvMqBM3JpMJkN+chIQEv67tcrnYtm0bS5cuxWAw+HVtNRDp5weRf46Rfn4Q+ecY6ecHkX+OkX5+EJhztNlsZGdn+67j/THqxI0SikpISAiIuImNjSUhISEiv7CRfn4Q+ecY6ecHkX+OkX5+EPnnGOnnB4E9x8GklIiEYoFAIBAIBBGFEDcCgUAgEAgiCiFuBAKBQCAQRBRC3AgEAoFAIIgoRl1C8VCQJAmPx4Pb7R7U8S6XC71eT1dXFx6PJ8DWBR9/np/BYECn0/nJMoFAIBAIziDETS9IkoTVaqWhoWFIF3FJksjIyKCysjIiGwT6+/zMZjMZGRkR+V4JBAKBIHQIcdMLtbW1WK1WX7m4Xq8f1AXY6/XS3t5OfHz8gN0TwxF/nZ8kSXR2dlJfXw9AZmamv0wUCAQCgUCIm3PxeDy0traSmppKSkrKkF7r9XpxOp1ER0dHrLjx1/nFxMQAUF9fT1pamghRCQQCgcBvRN4VeIS4XC4kSSIuLi7UpkQ8sbGxgPyeCwQCgUDgL4S46QORBxJ4xHssEAgEgkAgxI1AIBAIBIKIQogbgUAgEAgEfkOSpFCbIMSNoHeKi4uxWq39HlNQUBAcYwQCgUAQFpTZy7jn2D3s6dgTUjuEuBH0oKCggHXr1mE2m/s9rqysjFWrVgXHKIFAIBCoGkmS+EXVLzjSeYR/N/47pLYIcSM4i7KyMlasWMFTTz014LErV66ksLCQTZs2BcEygUAgEKiZ96zv8Xnb50Rpovh25rdDaosQN4KzWL9+PStXrjzLa1NQUEBeXl6fx69fvz5I1gkEAoFAjXgkD7+t/i0A96TfQ1ZUVkjtEU38BokkSXR5u/o9xuv1YvfaMXgMaKXA6sZobbTfS6mtViubNm2iqKho0K9ZvHgxAFu3bmX58uV+tUcgEAgE4cH71vepdlaTqEvka+lfA29o7RHiZpB0ebu4bN9loTbDx0ezPiJGFzOk16xZs4atW7dSWlrqe2zVqlUUFhZSVFREYWEhZrOZ/Px83/MrVqxg69atAL4uwk1NTSQlJfmOyc/PZ/v27ULcCAQCwSjl2fpnAViRuoIYXQwub2ibs4qw1Chi/fr15Obm+pKACwoK2Lx5Mzt27ABg+/bt5ObmnvWaLVu2sGXLFnJzc/F4PLS0tPRINF6yZImonBIIBIJRysGOg+zr2IdBY2BF6opQmwMIz82gidZG89Gsj/o9xuv1YmuzkWBKCPhsqWht9LBet2XLFnJycsjLy2PdunVs2bLFJ1bKysp6iJvBkJSURHNz87DsEQgEAkF481rTawAssSwhxTC0mYyBQoibQaLRaAYMA3k1XlxaFzG6GNUOzjSbzezYsYM5c+awevVqX84MyDk3wxE3ZrN5wJ44AoFAIIg8XF4X21u2A3B90vUhtuYM6rwCCwKKkltTXFx81uMD9bURCAQCgaA7H9s+ptXTSrI+mXmmeaE2x4cQN6OMsrIy1qxZ46uI2rBhg++54YaXysrKhDASCASCUchbzW8BcE3SNeg0uhBbcwYhbkYZS5Ys8SUWb9myhXXr1vmSgfPy8igrK+vxmtzcXMrKyrBarbz//vs9jrFarcydOzco9gsEAoFAHTi8Dj62fQzA1ZarQ2zN2QhxM4pYsWIFubm5rFy5EpDDUE899RQrVqzAarWyePHiHqEqkEu98/PzycvL43e/+12P57dv335W+bhAIBAIIp/CtkLsXjuphlSmxU4LtTlnEZYJxZs2bcJqtWI2myktLWXt2rUiLDIItmzZ0uOx5cuX+/rT5OfnYzabKSgoOCvRGKCoqEiuBrPZSEhIOOu5goIC0aVYIBAIRhk7W3cCsCBxAVqNunwlYSduNmzYcNZ4AKvVyv3339/rhVswdNauXcv69et7iJu+2Lp1K4sXLxaeG4FAIBhFeCUvH7R+AMAViVeE2JqeqEtqDYLt27ef5aURZcj+ZfXq1ZSVlfUanuqNNWvWCK+NQCAQjDKO2Y/R4GogRhvDXJP6ci7DTtyYzWaWLFniEzTDbTwn6JuioiLuv//+AUXjihUrWL9+vfDaCAQCwSjjM9tnAMyNn4tRawyxNT0Ju7DUU089xZw5c7BYLKxevZq8vDyefPLJPo93OBw4HA7fv202GwAulwuXq+fsC5fLhSRJeL1evN6hTf6SJMn3v0N9rZpISEhg9+7dAGedx7nn98ILL/Q4Zih4vV4kScLlcvnmVoUa5TvR23cjEoj084PIP8dIPz+I/HOMhPPb1boLgLlxc/u8lnb/X38wlLU0knLFCiM2bdrE9u3bffke3UcInMtPfvITHn300R6PP/fcc8TGxvZ4XK/Xk5GRQXZ2NlFRUf42XdANp9NJZWUltbW1uN3uUJsjEAgEgkHg0rjYkLUBj8bDAzUPkOpODcq+nZ2d3HXXXbS2tvYobDmXsBM3a9asYcmSJSxevJiysjJfGXP3Sdfd6c1zk52dTWNjY69vTldXF5WVlUyYMIHo6KHNb5Ikiba2NkwmExqNZmgnFgb4+/y6urqoqKggOzt7yO91oHC5XGzfvp0lS5ZgMBh
CbY7fifTzg8g/x0g/P4j8cwz38/us7TO+XfFtUvWpvDr11V6vB4E4R5vNRkpKyqDETViFpZRGckolT25uLkVFRcyZM4etW7f6Spq7YzQaMRp7xgMNBkOvb7jH40Gj0aDVaoc8H0oJzyivjzT8fX5arRaNRtPnZxFK1GiTP4n084PIP8dIPz+I/HMM1/MrtssFJxclXDRghMOf5ziUdcLqCtxXm/9Vq1YF3xiBQCAQCEYhRW3y+B41VkkphJW4UTronlvFU1RU1KvXRiAQCAQCgf/o9HRypPMIAHPi54TYmr4Jq7AU4JuHlJyc7OtxI/qsCAQCgUAQePZ37MeDh4yoDDKNmaE2p0/CTtyYzWYhZgQCgUAgCAHF7XK+jZq9NhBmYSlBaCkuLqa1tbXfY5QJ4wKBQCCIPPa07wFgdvzsEFvSP0LcCHxs2rQJi8XS63MFBQU8/vjjJCYm9rtGWVmZSPAWCASCCMTldXGw4yAgxI0gjJg7dy5JSUk9Hlf6CW3atGnANVauXElhYeGgjhUIBAJB+HDMfgyn5CRRl8h44/hQm9MvQtwIzqK3qrP169efNYkdZE9OXl5er2usX79e5EUJBAJBhHGg4wAAM+Nmqr5RbdglFIcKSZJwdfY/10LySrg6XLh0LjTawH7whliD379cZrOZtWvXnvWY1Wpl06ZNFBUVDXodpcliX40VBQKBQBB+dBc3akeIm0Hi6nTx+/jfh9oMHw+1P0RU3NBmX61Zs4atW7eeNapi1apVFBYWUlRU1Ot09cLCQsxmM/n5+b4OxbfddhsvvvgigE9gtbS0nOXZyc/PZ/v27ULcCAQCQYSg5NucF3deiC0ZGBGWGkWsX7+e3NxcX8JvQUEBmzdvZseOHX2+Zvv27T1Ez+bNm9myZQu5ublIkoQkST06Ry9ZskRUTgkEAkGE0OxqptpZjQYNM+JmhNqcARGem0FiiDXwUPtD/R4jeSVsNhsJCQlBCUsNhy1btpCTk0NeXh7r1q3rd6I6yMnEvXl0BiIpKYnm5uZh2SgQCAQCdaF4bXKiczDpTCG2ZmCEuBkkGo1mwDCQ1+vF4DFgiDOodnCm2Wxmx44dzJkzh9WrV/vyY/rCarUOS9wo3aMFAoFAEP6EU74NiLDUqETJoykuLh7w2P68OgKBQCAYHRzolMVNOOTbgBA3o46ysjLWrFnjq37asGFDv8cPN7zU1wR3gUAgEIQXHsnDoY5DQPh4bkRYapSxZMkSX2Kxkn+Tn5/fZ3gqLy+PwsLCHo/n5uZSVlaG1WqlsLCQ3Nzcs8JXVquVuXPnBuw8BAKBQBAcyrvK6fR2EquNJTd66GkKoUB4bkYRK1asIDc3l5UrVwJyyOmpp55ixYoVfebHLF68uNfwVX5+Pvn5+eTk5PTasG/79u3k5+f71X6BQCAQBB8l32Z67HR0Gl2IrRkcwnMzitiyZUuPx5YvX95vL5r8/HzMZjMFBQUsXLjwrOf6a+xXUFAguhQLBAJBBBBuycQgPDeCQbB27dohCZWtW7eyePFi4bkRCASqRpIknF5nqM1QPeHUvE9BiBvBgKxevZqysrJBVVeB3AlZeG0EAoFaKbeX893S73Lhngu5eO/F3HXkLl5veh1JkkJtmuqwe+2Ud5UDMCNW/c37FIS4EQyKoqIiVq1aRWtra7/HrVixgvXr1wuvjUAgUCW7bLu46+hdvN/6Ph48gDzt+n9P/i8/PfVTXFL/MwRHGyc6T+DFS7I+mdSo1FCbM2hEzo1gUJjNZnbv3o3NZuv3uN7yegQCgUAN7Gvfx3dLv4tTcjLfNJ/vZH2HJEMSrzS+wqaaTfyn6T+4JTePjn9U9VOvg8WRziMATIudFmJLhobw3AgEAoEg4rF77Py/iv+HQ3JwWcJl/D7v90yJnUKqIZX7M+/nl7m/RIeON5rfYEujuElTUMTN1NipIbZkaAhx0wci9hp4xHssEAiCxRM1T1DtrCbdkM5jOY9h0J49n+8K8xV8K+tbAPym6jdUdlWGwkzVITw3EYJOJ9fwu1wi7hpo3G43AHq9iI4KBILAUemo5Pn65wH44bgfEq+L7/W4e9Lu4SLTRTglJxuqNoz6G7Aub5cvmViImzDHYDBgNBppbW0d9V/sQGOz2dDpdD5BKRAIBIHg6dqn8eDhkoRLuDTx0j6P02g0rMleg0Fj4BPbJ3zQ+kEQrVQfJ+wn8OAhSZ9EmiEt1OYMCXHL3AspKSlUV1dTVVVFYmIiBoNhUMllXq8Xp9NJV1eXaqeCjwR/nZ8kSXR0dGCz2cjMzBSJewKBIGBUO6p5vel1AFZmrhzw+HHR47g77W6eqXuGjTUbuTzxcrSayPs9HwzdQ1Lh9jstxE0vJCQkANDY2Eh1dfWgXydJEna7nZiYmLD7IgwGf56fRqPBbDaTmJjoJ+sEAoGgJ5sbNuPBw4WmCwfdYfee9HvY0rCF4/bjvN/6PgvNCwd+UQRytPMoEH7JxCDETZ8kJCSQkJCAy+XC4/EM6jUul4sPPviABQsWYDAYBn5BmOHP8zMYDCIcJRAIAorD6+C1ptcAuCP1jkG/zqw3c0faHfy19q/8peYvXJV4VUTesA5EuCYTgxA3A2IwGAZ9IdfpdLjdbqKjoyNS3ET6+QkEgsjiXeu7tHpaSTekc0niJUN67V1pd/Gvun9xzH6M4vZi5pjmBMhKdeLwOii1lwLhKW5GZyBRIBAIBBHPy40vA3BLyi3oNUO7lzfrzdyQfAMAz9U/53fb1I6STGzWm0k3pIfanCEjxI1AIBAIIo4GZwPF7fI8vBuTbhzWGnem3gnAztadVDsGn38ZCYRzMjEIcSMQCASCCGS7dTsSErPiZpFpzBzWGjkxOVxouhAJiVebXvWzherGJ25iwi8kBULcCAQCgSAC2d6yHYAlliUjWufm5JsBeL35dbySd8R2hQtKpVQ45tuAEDcCgUAgiDBqnDXs79iPBg2LzYtHtNYV5iuI18VT66xld9tuP1mobpxeJyX2EkCIG4FAIBAIVMEHVrmz8Ky4WaRGpY5orWhtNFdbrgbwlZVHOiX2Ejx4SNQlkhGVEWpzhoUQNwKBQCCIKJSxCVeYr/DLejcl3wTIpeVtnja/rKlmwj2ZGIS4EQgEAkEE0e5pp7C9EIDLEy/3y5ozYmeQG52LQ3L4cnkimXBu3qcgxI1AIBAIIoZPbZ/iltyMM45jgnGCX9bUaDTcmCyXk4+GqilF3ITj2AUFIW4EAoFAEDF81PoRIHtt/BlSuS7pOnToONBxgMquSr+tqzZcXhclXeGdTAxC3AgEAoEgQpAkiU/bPgXg0oRL/bp2iiGFuaa5gNxDJ1Ip6SrBLblJ0CUwJmpMqM0ZNkLcCAQCgSAiKOsqo8HVgFFj5IL4C/y+vtIzp6ClwO9rq4VISCYGIW4EAoFAECF81vYZALPjZ2PUGv2+/lXmq9Ch45j9GCe7Tvp9fTUQCfk2IMSNQCAQCCKET21ySOqihIsCsr5Zb2Z+wnwgcr03kVApBULcCAQCgSACcHldFLUXAXCRKTDiBmCJWQ5NRWLejcvrCvvOxA
pC3AgEAoEg7DnceZgubxcWvYWJMRMDts9V5qvQa/ScsJ+gvKs8YPuEgpKuElySiwRdAllRWaE2Z0QIcSMQCASCsEfx2uTH5wc0ETZBn8CFpguByAtNRUoyMQhxIxAIBIIIYE/7HkBOJg40StVUpHUrViaBh3syMQhxIxAIBIIwxy252du+F4A58XMCvt+ViVeiQ0dpV2lEVU1FSjIxgD7UBgyXNWvWkJeXB0BSUhLLly8PsUUCgUAgCAXHO4/T6e3EpDORF5MX8P1MehPzE+azy7aLd63v8rWMrwV8z0Dj8ro4YT8BCHETEqxWK4sWLWLHjh2YzWaKi4uZM2cOkiSF2jSBQCAQhAAl32Z2/Gx0Gl1Q9lxoXsgu2y7es74XEeImkpKJIQzDUmvWrOH222/HbDYDkJ+fz/btkRX3FAgEAsHgKW4vBoKTb6NwZeKVaNBwqPMQNc6aoO0bKLo37wv3ZGIIQ3GzadMmli9fTllZGQUFcqb64sWLQ2yVQCAQCEKBV/L6konz4/ODtm+SIcknpt6zvhe0fQOFkkwcCSEpCLOwVFlZGQDFxcXk5uaSm5vLqlWrWLFiRZ8Cx+Fw4HA4fP+22WwAuFwuXC6XX+1T1vP3umoh0s8PIv8cI/38IPLPMdLPD4Z2jifsJ2jztBGjjSHPkBfU9+UK0xUUtxezo3kHKywrBv06NX6GhzsOAzApapJf7ArEOQ5lLY0URskqBQUFLFmyhO3bt/vEjNVqJScnh5aWll5f85Of/IRHH320x+PPPfccsbGxAbVXIBAIBIHl8/jPedvyNrn2XL7c+OWg7t2qa+V3Y34HEjx8+mHivfFB3d9fePDw+NjH8Wg8fOv0t0jyJIXapF7p7OzkrrvuorW1lYSEhH6PDSvPjcLcuXN9/99sNmO1WikoKOjVe7N27Voefvhh379tNhvZ2dksXbp0wDdnqLhcLrZv386SJUswGAx+XVsNRPr5QeSfY6SfH0T+OUb6+cHQzvHDkx+CDZZOWMp1868LkoVn2F6yncP2w0TNj+K6pMHtr7bP8Jj9GJ4SDyatibuX3u2XnJtAnKMSeRkMYSVucnNze33cbDb7QlbnYjQaMRp7Toc1GAwB+1IFcm01EOnnB5F/jpF+fhD55xjp5wcDn6MkSezv3A/A3MS5IXk/FlkWcdh+mPdt77MiffChKVDPZ3iiVS4Bnxo3laioKL+u7c9zHMo6YZVQrOTZnCtkrFbrWd4cgUAgEEQ+1c5qmtxN6DV6psdOD4kNV5mvAmB3225s7sF7FtSEr3lfTGQkE0OYiRuA9evX88ILL/j+vXXrVhYvXkx+fvCy5AUCgUAQevZ3yF6bKTFTMGp7euiDwfjo8eRF5+HBwwetH4TEhpESaZVSEGZhKYDly5fT3NzMhg0bAGhqahJ9bgQCgWAUcqDjAADnx50fUjsWmRdRWlvKu9Z3uSH5hpDaMlRcUmR1JlYIO3EDsHLlylCbIBAIBIIQs79d9tyEWtwsNC9kU+0mPrV9Sqenk1hd+FTiltnLcEpO4nXxjDWODbU5fiMsxY1AIBBEMsVtxexs3UmTq4ncmFwWmxczLnpcqM1SFXaP3edxmBk/M6S2TIyZSLYxm0pHJR/bPvZNDQ8HDnUeAmSvTSR0JlYQ4kYgEAhUQpunjf+t+F92tu4882ALbDy9kXsz7uXezHtDZ5zKONx5GA8eUg2pZBgyQmqLRqNhoXkhf6/7O+9Z3wsrcXOw4yAAM2NDKxD9TdglFAsEAkEk0upu5cETD7KzdSc6dNyQdAPfHPNN5pvm48HDptpN/KD8B7gld6hNVQVKMvH5ceerwuOgVE192PohDq9jgKPVg5K3NDMussSN8NwIBAJBiPFKXn5Q/gMOdx7GrDfzp4l/YmrsVAC+nvF13mp+i0dPPsoO6w5iNbFcwAWhNVgFqO2iPCN2BumGdOpcdXzW9hkLEheE2qQBafO0Ud5VDsB5ceeF2Br/Ijw3AoFAEGKeb3ieT9s+xagxsnHiRp+wUbg26Vp+nvNzdOh4reU19sfuD5Gl6kCSpLM8N2pAq9FypflKIHwGaR7uOIyERFZUFkkGdY5cGC5C3AgEAkEIqXHW8MfqPwLwP2P/h0mxk3o9bqF5IasyVwHwluUtqp3VQbNRbVQ5q2hxt6DX6HsIwVCyyLwIgJ3Wnbgk9QzF7AvF+xVpXhsQ4kYgEAhCysbTG3FIDvLj81mesrzfY7+a8VVmx83GqXXy69O/DpKF6uNAu3xRnhozNWTN+3rjgvgLsOgttHpa2dO2J9TmDIjaQnv+RIgbgUAgCBEnOk/wRvMbAHw769sDJsbqNDoeyXoEraTl47aPw7Yj7kjxhaTi1RGSUtBpdFyReAUAO6w7QmxN/0iSxMHOLyqlhLgRCAQCgb94uu5pJCQWmxcPOjQw3jiei9ouAuC3Vb8dldVTirhR40VZCU29Z30Pr+QNsTV9U+Wswuq2YtAYmBwzOdTm+B0hbgQCgSAE1DprKWgpAOBrGV8b0msvt11Ooi6Rk46TvNX8ViDMUy12j50SewmgnmTi7swzzSNeF0+Tu8kX9lEjim1TY6cSpfXvJHA1IMSNQCAQhIAX6l/Ag4e58XOHnBRrlIzck3oPAE/VPBUWyav+Qmnel2ZIIyMqtM37esOgNfjKwNUcmvI171Oh98sfCHEjEAgEQcbhdfBK0ysA3J1297DWWJ68nGR9MtXOara3jJ7hwWoOSSkoDf3es76HJEkhtqZ3IrUzsYIQNwKBQBBkdrbuxOaxkW5I59LES4e1RrQ2mttTbwfg2bpnVXsR9TdqmQTeHxcnXEy0NprTztMctR8NtTk9cHgdHLMfAyKzDByEuBEIBIKg82rTqwDckHwDOo1u2Ot8KfVLGDVGjtqPsqdd/aXHI0WNzft6I0Ybw6UJsmhVY0O/Qx2HcEtukvXJZEZlhtqcgCDEjUAgEASROmcdn9o+BeDG5BtHtJZFb+H65OsBeLb+2RHbpnaU5n0GjUFVzft6QwlNvWt9N8SW9KSovQiAOaY5qpjLFQiEuBEIBIIgsq1lGxISs+Nnk23MHvF6d6XeBcihrkpH5YjXUzP722WvTThU+FyWeBl6jZ7yrnLK7eWhNucsFHGTH58fYksChxA3AoFAEESU5N+llqV+WS8nJodLEy5FQuLf9f/2y5pqJRxCUgomnYkLTRcC6vLeuLwun0icEz8nxNYEDiFuBAKBIEhUO6o51HkILVpfszd/cFea7L15telV2j3tfltXbYTbuICF5oWAukrCD3UewiE5sOgt5ETnhNqcgCHEjUAgEAQJxWszxzSHZEOy39a90HQh443jsXvtbGvZ5rd11USnp5MT9hNAeHhuAK4wX4EWLcfsx6h2qGPQaXF7MSCHpCI13waEuBEIBIKgoYQnFpsX+3VdjUbDLSm3APBK4yt+XVstHOo8hBcv6YZ00qPSQ23OoLDoLb68FrVUTRW2FQJ9h6RcnS52/PcOtq3aRsuJlmCa5leEuBEIBIIg0OBq4FDnIUC+o/c3NyTdgA4dhzoPcaLzhN/XDzVKnki4e
G0UlNCUGvJuXJLLl7eUb+qZTNzZ2MnzVzzPnj/uYf+m/fxt2t8of0ddydCDRYgbgUAgCAIftn4IwIzYGaQaUv2+fpIhiSvNVwL4uh9HEmqdBD4QSkn4vo59NDgbQmrL0c6j2L12EnWJ5EXn9Xj+4x99TF1hHTHJMYxdMBbJI/He/7yH16PeAaB9IcSNQCAQBIGd1p0AXJHof6+Nwi3JtwDwZvObOLyOgO0TbCRJCrtkYoW0qDRmxc0C4O2Wt0NqS1GbXAI+O342Ws3Zl//20+0cfFoeyXDTizdx66u3Em2JpvlIM4f/dTjoto4UIW4EAoEgwNg9dj5v+xwITEhK4cKEC0k3pGPz2FST4+EPTjlO0eppJUoTxdQYdTfv643rk+RGi683vR7SMRndm/edS+GvC/E4PWRdlkX2FdkYE43Mf2Q+AJ8//nlQ7fQHQtwIBAJBgClqL8IpOcmMyuw1HOAvdBodNyffDERWaEoJSU2LnYZBawixNUNniWUJUZooSrpKOG4/HhIbXF4Xe9v3Aj2b93lcHg78VfaMXbj2Qt/js74xC61BS/PRZpqPNwfNVn8gxI1AIBAEmF22XYA8UDHQ5bc3Jd8EwO623dQ4agK6V7AIh2GZ/ZGgT2BB4gIA3mh+IyQ27O/YT6e3E4vewuSYyWc9V/VBFQ6rg5jUGCZcPcH3uDHBSPYVchft0tdKg2nuiBHiRiAQCAKMMkvqItNFAd8r05jJPNM8AF5vfj3g+wWDfe37gPAVN4BvBthbzW/hltxB3/8T2yeA/B08N9+m5D8lAOTdmIdWd/ZzeTfKnkYhbgQCgUDgo8ZZQ4WjAh065pvmB2XPG5PkgZyvN4c2x8MftHvaKe2SL6wz48Mrmbg7FydcjEVvodndzGdtnwV9f8V7eEnCJWc9LkmST9xMvHlij9fl3pgLQPVH1XS1dAXYSv8hxI1AIBAEEOWicl7ceZj0pqDsudC8kFhtLFWOKvZ27A3KnoHiUMchJCQyozIDUkIfLAwaA9dYrgHgTeubQd270dXIMfsxAC5KONt72LC/gbZTbehj9IxfPL7Ha805ZpKnJyN5JCrfD5/BrELcCAQCQQDxhaQSAh+SUojRxbDYIndBfq3ptaDtGwjCaVjmQNyQfAMAH9o+pFPbGbR9lZDU1JipJBmSznqu8j1ZsGRfmY0htvdk7axLswCo3V0bQCv9ixA3AoFAECDckttXAn5xwsVB3VsJTRW0FGD32oO6tz8J1/42vTElZgpTYqbglJzsjdsbtH2VHktKUnN3qj6sAmDsgrF9vj5jXgYANZ+HT4K6EDcCgUAQIA53HKbN04ZJZ2Ja7LSg7n1B/AVkRWXR4e0I2543Xsnr89wojfDCGY1Gw4rUFQAUxRfhlQLf+dfutftCo0oHawVJkqj+SB7oOfbyfsTNfFnc1O6uRfKGRw6XEDcCgUAQID5tk0NS803z0Wv0Qd1bq9H6wiCvN4Vn1dRJx0naPG0YNUYmxU4KtTl+4RrLNcRr42nRt/Bp+6cB3+9z2+c4JAeZUZk9SsBbTrTQWd+JzqgjfW7fw0hTZqSgj9HjtDnDpt+NEDcCgUAQILr3twkFSmfcz9s+p9YZPvkSCgc65ZDU9LjpGDTh17yvN2J0MdxgkUXni00vBny/91vfB+SxH+f2WFK8NhnzMtAb+xbfWr2W9Dmy+Kn9PDy+R0LcCAQCQQDo8HRwqEOeAh6M/ja9kWXMYk78HCQk3mwOboWOP1Aqvbp31G0/3U5nQ/CScQPBrcm3AvBJ2ydUO6oDto/L6+J96/tAz5AUMKiQlIISmgqXvBshbgQCgSAA7GnfgwcPWVFZZBozQ2aHEpp6rem1sOt5U9xRDMiDHiVJovgPxTyV8xQbszby9tffxtnmDLGFw2O8cTw5XTlISLzYGDjvzS7bLmweGymGlB4jF+CMUBlz8ZgB10rPlz03jfsb/WtkgBDiRiAQCAKAMoF5rmluSO1YZF5EtDaaU45TvsqjcMCqs1LrqkWHjvPjzqfw14W8+9C7eJwevC4vB58+yM7VO0Nt5rCZ1y53kX6l8RU6PYHxRClTyK+2XI1OozvrOVeni+Yjcv6MEnLqj5QZKQA0HmoMC5EsxI1AIBAEgML2QgDmxodW3MTp4lhkXgTAa83h0/PmlPEUAFNjp6Jp0bDr/+T8pUt/eim3viqHdfY9uY+64rqQ2TgSJtsnMzZqLK2e1oAMOe30dPpCUldbru7xfMP+BiSvRGx6LHGZcQOuZ5liAQ10NXeFRVhQiBuBQCDwM22eNo52HgVgjmlOiK05E5ra1rKNLm94tNA/aTwJyPk2nz32GU6bk7QL0rjoBxeRd2MeU++cChJ88MgHIbZ0eGjRcnfK3QD8q+5fuCSXX9ff1rINh+RgnHEc02On93heEYXp+emDGuZqiDFgzjUD0HS4ya+2BgIhbgQCgcDP7Gnfgxcv2cZs0qMGdvkHmrnxc8mIyqDd0+5r6KZ2FM/NLGkW+56UB2de/vjlaLTyhfiyxy4D4GTBSdpr2kNj5Ai5znIdyfpk6lx1vNHk32nhLzW+BMAtybf0Kl7qi+uBM7k0gyF5ejIgxI1AIBCMSgrb1BGSUtBqtNyQ9EVicRiEpppcTTQZmtCgIeHdBNx2N5bJFiYsneA7xpxjJvOiTJDg+NbjoTN2BBi1Rr6S/hUA/lL7F1xe/3hvjnUe41DnIfQaPTcm39jrMYrnJi0/bdDrCnEjEAgEoxifuAlxMnF3FHHzme0z6p31Ibamf/Z07AFgYvREKl6oAGDaXdN6eCCm3j4VgGObjwXVPn+yPHU5KYYUapw1vNz0sl/W3NywGYCrEq/qMUsKwO1w03hQrnoSnhuBQCDoRqu71e95ApGAzW3juF32JKgh30YhOzqbC+IuwItX9T1vlP42s7tmU7GtAkDOsTmHySvkjrvVH1XTVt0WLPP8SrQ2mnsz7gVgU80m2jwjO48GZwNvNMshrtvTbu/1mOYjzXhdXoxmIwnjEwa99qgRN3v37uWRRx7h6qvPZGL/8pe/ZO/evSO1SyAQqJAOTwd/qP4D1x24joX7F3LJnkv48tEv81bzW3gkT6jNUwXF7cVISIw3jifVkBpqc85CCVGovefNnk7ZczPug3FIHon0/HSSJvf0QJiyTL7mcqfePRVUG/3JrSm3Mt44nhZ3C3+r+duI1nq2/llckosL4i5gdvzsXo9RvDapM1MHlUyskDRV/gw66zqxN6t7GOuwxc1TTz3FokWLyMvLo7Cw0Pd4Tk4Oa9as8YtxAoFAPZzoPMHyw8t5pu4Z6lxyvN6LlyOdR/hRxY/4dum3sbqtoTVSBagxJKWw2LIYo8ZIhaOCQ52HQm1Or7S6WyntKgVA/7E8EiD3htw+j1emWVd/GLhOv4HGoDHw8NiHAXiu4TlK7aXDWqfR1cjWxq0AfDXjq30f94W4ST4veUjrR8VHET8mHgBriXVYNgaLYYubDRs2UFRUxP3333/W48uWLTtL7AgEgvDnSOcRVp5YSb2rnqyoLDbkbOCDWR/w5nlv8kDmAxg1RnbZdvH1
Y1+n2RUeg/UCha+/jQrFTbwunoXmhYDsvVEju9t2A5DqSKX+fTk3aPyS8X0eP/YyWdxUfVQVeOMCyGWJl7EgcQFuyc2jJx8dlif0idNPYPfamRE7g8sSLuvzOEXcpJyXMuQ9EnMTAbCWWof82mAybHHT1NREcnJP1VdeXh5Ud+eSJUuCtpdAMBqxuq18t/S72Dw2ZsbN5Nmpz7LIsog4XRzpUencl3kff5/yd9IN6Zx0nORbJd+i3ROepbkjxeq2csJ+AoA58erJt+mO0vPmnZZ3cHgdIbamJ8qw0amHpmJvtGOIN5B5Yd/jK7IuywLkPJLORvU3l+uPtdlridPGcajzEM/UPjOk1x7tPMp/mv4DwMNjH+433DQScWPOMwPQWtY65NcGk2GLmxUrVrBixQpsNpvvMZvNxqpVq1i5cqVfjBuIrVu3UlBQEJS9BILRiCRJ/KTiJ9S56hhnHMcfJ/4Rk97U47hJsZPYOGkjSfokjtmP8dipx1Sd0xEoitvkWUi50bkkG4bm8g8W80zzSDek0+Zp44NWdTXAkySJT9s+BWDsp7JHJvvKbHQGXZ+viUmO8SW6KoMgw5W0qDS+l/09ADbWbORz2+eDel2Xt4sfV/wYCYmllqVcEH9Bn8c6bA5sJ+XrtjJSYSgo4iZiPTdPPvkkJpMJs9lMS0sL8+bNw2KxkJeXx+OPP+5PG3vFarXS3Dy63d8CQaDZ3rKdD20fEqWJYn3OeuJ18X0eOy56HL/K/RU6dGxr2eZrIjaaUHNISkGn0XF90vWA+kJTFY4Kap21RGmiiC6KBmDCkgkDvk6Zal31YXiHpgBuSr6Jm5NvxouXNeVrONF5ot/jJUniV1W/orSrlGR9Mt8f+/1+j1cqneIy44hJjhmyfREvbgC2bNlCSUkJW7Zs4ZFHHqGkpIQnnnjCX7b1y+bNm7ntttuCspdAMBqxe+z8tvq3AHwt42tMjp084GvOjz+fb2V9C4DfVP+GGmdNIE1UHUoysVpDUgrXJ8vi5lPbpzS4GkJszRk+tclem1nRs3Aekyd+j71i7ICvy7xIDlspXXfDndXZq5kZNxObx8aDJQ9yuONwr8dJksQTNU/4biR+Mv4nvfa16c5IQlJwJudG7WEp/UgXyM3NJTe370z2QFBQUMDixYsHdazD4cDhOBNXVsJoLpcLl8u/PTqU9fy9rlqI9PODyD/HoZzf3+v+Tp2rjkxDJncm3Tno9+R2y+281/Ie+zv38/OTP+dX4381pHLTkRKqz7CxsxGegyldU8j5Wg6u+MDs74/zy9JlMTN2Jgc6D/BGwxvcnXq3v8wbEZ+0fgJAflU+NrsNQ7yBxCmJA56rZboFgPp99TidzqB+34bDQJ+hDh2/Hv9rvln2TY53Hefrx7/OyvSVrEheQbRW9mg1uZr4dc2v2dG6A4Dvjfke82LnDfhe1R+QBaBlmmVY36G4cfKQzbbqNuxtdvTRvcuIQPwdDmUtjTTMwLhWq+3zC5Sbm8uJE/270kbC1q1bWb58OVarFYvF0m9s/yc/+QmPPvpoj8efe+45YmNjA2ajQBDOODQOfjfmd3Rpu1jWuIwZ9hlDen2DvoFNGZvwaDzc3nA7U7qmBMhSdeBqdFG2oQzt8S+c4VrIXJ1J/EV9h/FCTVFcEW8kvUGqK5Vv1H4DDaEVBG7c/CLrF7i0Lr729Nfo+mMXMTNjGPvTgT03XpeX0jtKwQMTnpqAIdUQBIsDT5emi/8k/YdjsXIHZqPXyBjnGNwaN9VR1Xg1XrSSliXWJVzYfuGg1qz+v2o6iztJeyCNxKsTh2yTJEmU3VWG1+5l/B/GE5UdNeQ1hktnZyd33XUXra2tJCT033xw2J6boqKiHo81NTXxyCOP8I1vfGO4yw7Ipk2bhpSwvHbtWh5++GHfv202G9nZ2SxdunTAN2eouFwutm/fzpIlSzAYIuOPqzuRfn4Q+ec42PN7pv4Zuuq6mGCcwMNXPoxO03dCZ1901nby94a/83HWx3xr0rcwaIPzfgb7M5QkiZeueQntcS2OBAdRE6PQFGto3tjMNfdeM6QOsIPBX+d3uedyth/ZToOhgbyr8pga07MDcDApbC/EVe4iWZ/MeOt4jnGM6ddO57Lr+i5p7s6/pv2LpoNNzEyeSe51wY0mDJWhfIa3SLfwZsub/KX+L9S6aimPLvc9d17MefzPmP9hRuzgbz6eefgZAC7/0uWDCvn1xnOTn6NhXwPnjz2/z/c6EH+H3QuYBmLY4mb27N47H27evJkHHniA++67b7hL90lxcTFz5w4tUc9oNGI0Gns8bjAYAvbDF8i11UCknx9E/jn2d34Or4Pnm54H4L7M+4iOih7WHveOuZfXW16nylnFK62vcFfaXcO2dzgE6zM8tvUYVe9V4Y3ysv2Z7fy/S/8fNTfUULu7lg8e/oBbX701IPuO9PySDElcZb6Kd1re4a3Wt5iZMNOP1g2d3Z1yf5uLEi6i/nM5dJJ1SdagzzH9gnSaDjbRfKiZKbeGh6dwsJ/hrem3cmPajZywn+CE/QTRmmgmxkwkN2ZoIs7tcGOrkAVC6ozUYX9/zHlmGvY10HGqY8A1/Pl3OJR1/D5bKjc3N2BN/JqbmykoKGDDhg1s2LDB1wl5w4YNbN26NSB7CgSjjXda3sHqtpJuSGeJZfh9pOJ0cXxjjOzFfab2GexedbdrHw5et5ed39sJwKGvHqJ9fDvzkuZx7d+vBaD09VJaK9SbeKkM03y7+W2cXmdIbVGSied55tFyvAWAjAszBv361AvkURf1eyMjqfhc9Bo902KncVPyTSxNWjpkYQNyhZPklYgyRRGXETdsW3xJxeXq/W4P23Ozdu3aXh8vLi4etjEDsXjx4rMSiYuLi9m0aROrV68O2J4CwWhCkiReqH8BgBWpK9BrRlZzcGPyjTxd+zSnnad5seFFvpz+ZX+YqRrK3y7HdtKGLlnHka8cYUrMFBL0CTANxi8ez8mCk+x/aj+XP3Z5qE3tlQsTLiTNkEa9q54CawHXJV0XEjvqnfUcsx9Dg4ac4zkc4xiGTMOQSpXTZqUB0LBPPdVfaqPlmCwaLVMsI0q6VkKttlODDxMFm2F7boqKinr9LycnJyiN9bZu3cq6desAWLNmjWjmJxD4gYOdBzlqP0qUJopbUm4Z8XoGjcE38fjvdX/H7oks783+p/YD0PWlLjwxnrP625y/6nwADvz1AB6nOoeK6jQ6lqUsA+D5+udDZsf7re8DcH7c+XTs7wDAmNcznaA/UmfJnhtriRVnR2i9UGql+ZjcG663IaRDIWGc+sXNsG/Ltm3b5k87hszy5ctZvnx5SG0QCCKNVxpfAWCJZQkWvcUva16ffD1/q/0b1c5qtjZu5Z70e/yybqhpP91O2RtlAOy7YR9wdvO+iTdPJDYtls66Tqo+qGL84r7nI4WSL6V8ib/U/oVDnYc40HGAmXHBz7153/o+AFear/T1qjHmDk3cxKbGEpMSg73RTsvxFtJnp/vbzLBHETeWKSP721bETdupthHbFCj8nnMjEAjCE7vHzvaW7QDcnHy
z39bt7r35R90/IsZ7c/SFo0geidRLUjkx9gQ6dMyOP1NooTPoyLk2B4Dyd8r7WibkJBmSuNpyNYAvJBlM2txtvuaHVyZeSV2xPHE+Om/oiexJU2SPhBJ+EZyN8r4o79NwMY2TR7B01nfisquzJ9igPTd95dj0hRIyEggE4cG71nfp8HaQFZVFfny+X9e+Lvk6/lr7V6qd1bzc9HLQK6cCQemrpQBobpBzF6bGTu0xniLnmhwO/f0QFW9XwC+CbeHguSPtDl5vfp3t1u18x/UdUgzD6147HD60fYgHD7nRuaQ70rGWWAEw5gzNcwOyR6L642qfh0JwNr6w1AjFTbQlGkOcAVeHi7bKthGHuQLBoMVNb31t+kLt3SEFAkFPXm9+HZCTgP39N2zQGPhqxld57NRjPFv/LLel3jbiZOVQ0tXS5ZtjVHaZHJrqbZ7U+CXjQSO3vG+rasM0tufQUTUwLXYas+Jmsa9jHy81vsTKzOAMPwZ83sKrzFf5Kp1M403oEobeW0m5aDcfFeLmXOxNdrqauwAwTzKPaC2NRkPC+ASaDjfRdirMxU2oc2wEAkHgaHQ1+kIDgaqYuT7pep44/QS1zloKWgq4JumagOwTDMrfKkfySCTPSOY9y3vg7F3cxCTHkDk/k5rPaqh4p4KZ94a2l0x/3JF6B/s69rG1YSv/lf5fGLVD95wMFZvbxic2eeTC1ZarfSGptAvShrWeT9wIz00PlPfElG0iKm7kXYVN40w0HW5SbVKxyLkRCARsb9mOFy8z42aSZcwKyB5GrZHbUuVht/+s+2e/Y1PUTulrckgq7bo0ap21cr5NXO+NTccvlROJT713Kmj2DYerLFeREZVBk7uJ/zT9Jyh7vmt9F7fkZmL0RPJi8qjfI3tulMqnoZI09Yucm+MtYf39CgS+ZOLJ/ikUUHvF1Ij8wnv37u21BNtsNgekQ7FAIAgM21pkz6ySWBoolqcu5+napzlqP0pheyHzTPMCul8gkLwSJ7efBMB2hfzDfl7cecToeu/JknWpLBZrPlP3hHSDxsB/pf8X6yvX80ztM9yafGvAR2a80/IOAFcnyd8738TqmSk00TTk9RJzE9Hqtbg6XLRXt6s2DBgK/JVMrKD2iqlhe25efPFF8vPz2bhxI2vWrOH555/n+eefZ/Xq1WzZssWfNgoEggBS46xhf8d+tGhZbFk88AtGgEVv4eYUuRLrn3X/DOhegaLxYCP2JjuGOAMHJx0Eeg9JKWTOzwTk/iv2JnVXit2cfDMphhTqXHW80fxGQPeqcdSwu00euXC15Wq8Hi9Nh2VBkzwjeVhr6gw6X/dcEZo6G38lEysoFVO2k+r03Axb3DzyyCMUFBRQUlLC7NmzKSwspLCwkM2bN5Obq+6hZQKB4AzvWd8D4IL4C0g1DC8cMBTuSrsLDRo+tn1Mqb004Pv5GyW8lHVZFoUOOU+pP3ETbYn2hQLU7r0xao18Je0rADxd9zRuyR2wvV5tehUJiXmmeWQZs7CWWvE4POhj9CTkDH/YqMi76R1lpMVIe9woqL1L8bDFTWlpKQsXLgTkeVLvvSf/QC5fvpzNmzf7xzqBQOAXaj6v4W/T/safEv/Eyf8+SW1hre85pYHaVeargmJLtjHbt9dz9c8FZU9/Uvl+JQDxl8XT6GrEoDFwftz5/b4m8yLZe6N2cQNyUz+z3kyVo8oXrvQ3HsnDq82vAnBL8i3AmZBU8vRktLrhp4MqlUDWUutITIwovB6vr8Teb56bbNlz01bZpsr8pmF/g/Lz89m7dy8gz3x6/PHHAXjqqaewWq3+sE0gEPiB8rfLeeGKF2g+2ozb7sZZ6eSlpS9R9VEVLe4W9rTvAeQGasHi7rS7AXiz+U1aXOHTcE3ySlTtlEvAG+fKF+Pz484nWtt/w7nMC8NH3MToYnyfz19q/oJL8n+Ttl22XdQ6a0nQJfiEbvd8m5FgzjUD0Fqm3qGOwcZWYcPj9KAz6nyiZKTEj5F7OnkcHl+JuZoYtrhZu3Ytu3fL8dKVK1fS2NiITqdj1apVYpClQKASHDYHb3/9bdxdbnKvz+XLe79MzMwYXO0u3vqvt/ig4QO8eJkSM4UxxjFBs2tW3CxmxM7AKTnZ2rg1aPuOlIb9DXS1dGGIN1CcIw8JvtB04YCvU8RN7ee1qrzLPZfbUm/Dordw0nHSN5LDnygeu5uSb/KVnPvEzXkjEze+idVC3PjwVUpNsozIK9YdvVFPTIqcRN9e3e6XNf3JsM9y2bJl3H///b5/FxUVUVJSQktLi+hOLBCohI9//DEdNR1YJlm4aetNJE9PZswPxhCbHktrWSu7n5JvUK40XxlUuzQaja9L8ZaGLTi94THosPqTagAyL8mk0C7n21yccPGAr0udmYrWoKWrpUu1OQrdidfF+xr5barZRLvHfxevE/YTfNb2GVq03JF6h+9xf4kbxXNjLbOGhZAMBi0nvsi38VMZuEJ8luy9aatWX8XUsMWNVqvljjvu4OWXX/Y9lpOTQ2Jiol8MEwgEI8NWaWPPH+WQ06I/LkIfLXd+0MZomf+D+QBE/yEarUMbtHyb7iyyLCLdkE6Tu4m3W94O+v7DoWaXHFbS5mvp8HZg1puZGjt1wNfponQkT5MrgBoPNAbURn9xa8qtjDOOo9ndzKaaTX5bV6mSW2heSKZR9mi5HW5fwutIxU3C+ATQgKvdhb1R3dVpwULJPzJPNPt1XUXcRJTnprCwELPZzL333otOp+P222/n3Xff9adtAoFgBOx7Yh+SRyL7ymwmLJ1w1nPn3XsehjEGohujmblrJhOjJwbdPoPG4Gvq91z9c2Fxl31612n5f8+T//ci00VoNYP7GVVySRr2NwTGOD9j0Bj4/tjvA/B8/fOcsJ8Y8Zrl9nLean4LgK+kf8X3ePPRZiSPhNFs9OVyDBd9tB5TlpxXIkJTMj5xk2f267rK+xxR4kbpcdPc3Mzu3buZMGECK1euRKfT8eCDD/rTRoFAMETcXW72P7UfgNn/3bNzri5KR9tNsit5+rbpIZsH96WULxGtjeaE/QS723eHxIbB0lHf4btIFE2SZ+0NJiSlkHq+XGYfLuIG4JLES7gq8So8ePjpyZ+OuDT8yZon8eLlisQrmBE3w/d495CUP76LvrybciFuAF+llGViYMJSESVuupOfn8/69et58sknWbRoEU8++aQ/lhUIBMPk2JZj2BvtmMaZmHhTT6+MW3Lz2aLPANC8p6GrJTTVDgn6BG5MuhGA5+rUXRauhKTM08wc1MvN+y5KuGjQr1fETbiEpRS+n/194nXxHOo8xD/q/jHsdfa272W7dTsaNDww5oGznvNXpZRCYo4sbqxlVr+sF854PV6fyEvM82/aSETm3Ci89NJL3H777eh0Om677TbmzJlDYWGhP2wTCATD5Oi/jwIw896ZaPU9/8wPdB7gdO5pbJNsSE6J4y8eD7aJPu5MuxOAD20fcrLrZMjsGAglJMUX/fomx0wmxTD4i7Fy4W4+1oy7K3DN8fxNelQ63xv7PQA2nt7IvvZ9Q17DJbn4+amfA3
KF1KSYSWc9769kYgVRMXWGtqo2vC4vWoPW7+MofJ6bqgjy3Nx2223odDruu+8+LBYLhYWFNDU1sW7dOmbP7n2AnEAgCDz2Zrtv9tHU23tPdv2kTZ7EHHWTPB247I2y4BjXC+Ojx3N5wuUA/Lv+3yGzYyBqd8uND2umyx6coYSkQO4LEp0UjeSRaDoy9LlJoeSGpBtYbF6MBw9rytfQ6Bqa9+mvNX+ltKsUs97MQ1kP9Xje3+Kme8XUaEcJSZlzzX4rA1eIyJybpKQktm3bRnNzMxs3bhSCRiBQCSWvlOB1e0k9P7XPbqSf2GRxM+MaOe/h1Lun8Lq9QbPxXO5Ol5vGvdb8Gq1u9d1tS5JEXXEdAHtz9gJDFzcajSYs825Atv3H439MTnQODa4GHip5iDb34EIRn9g+4S+1fwHge2O/h1lvPut5Z5sTW4VcHp8yQ3hu/I2SJ+bvkBSc8dzYm+yq80YOW9xs3LiRRYsW+dMWgUDgB45tPgbAlNum9Pp8q66VUkcpWrRcefmVRFuicdqcZ41kCDZz4+cyOWYyXd4uXm58eeAXBJnW8lYcVgfaKC2nJpwiRhvDBXEXDHmd5OlyOXjz0fCbexSni+M3ub8hWZ/MMfsxvlnyTZpd/Z/H/vb9PFL2CBISy1KWcW3StT2OUYZlxmXGEZPc+2T1oaLMPWqrasPrCZ1oVwOBSiYGiE6KRmfUAdB+Wl3eG//6qAQCQUhxtjupfE+efTRp2aRejzkRLZf0nh93PhajheyF2QC+UFYo6N7U74WGFwLS8n8kKF4bpoLX4GW+aT4GrWHI6yietJZj4TNyojvZ0dn8ceIfSdAlcKjzEP917L8obOs9x3Jbyza+VfItOrwdzImfw3fHfrfX43whKT95bQDiMuLQ6rVIHkl1F91gE6gycJD/btVaMSXEjUAQQZx69xQep4fEnMQ+Q1InYmRxc2nipQBMWDIBgJMFoU3mvdpyNcn6ZOpd9RS0FITUlnOpK5LFTd0U+X+H2/Qwaar8mTQdDa+cm+5Mjp3M01OeZqxxLKedp1l1YhUPlTzEq02v8rntc/7T+B++ceIbrC1fS4e3g7nxc/ld3u98YxbORck/Sp6R7DcbtTrtmcGOp9RXyRNMAhmWAvXm3ehDbYBAoKA0cQtVz5VIoPytcgByrs3p9X3s8nZRbpSPUZJ4xy8eD8jVQO4ut6+TcbCJ0kaxInUFG2s28lz9c1xjuUY13wVF3JRPKkeHjgWJC4a1jmWKHBqwlljxur29VrKFAxOiJ/CvKf/ij6f/yIuNL/Kx7WM+tn181jE6dHw94+vcm3kvBk3fXi4lRKcIP39hGmeitbwV20kbWZdm+XXtcEGSpICGpUAOJwJ01HYEZP3hMqK/rL179/LII49w9dVX+x775S9/6ZsWLhAMBrfDzYc//JBN4zbxu9jf8dZX36L1pEgEHCqSJJ0lbnpjT8ce3Fo3aYY0JsbI/W8ScxOJSY3B6/JSv7c+aPb2xvKU5URpojjceZi9HXtDaouCJEnUF8vvS8u0FvJN+STqh3cXnJCdgD5Gj9flpbUivL/jJr2JtePW8tL0l/ha+teYZ5rHBOME5pnmcW/Gvfxnxn/4xphv9Cts4Iy4UcZT+Asl7yYcZnkFis76TlwdLtBAwoSEgOwRceLmqaeeYtGiReTl5Z3V1yYnJ4c1a9b4xThB5ONxeXj9jtf57Oef0VbVhrvLzaG/H2Lzws10NnaG2rywovlYM7aTNnRROrKvyu71mI/b5LvrS0yX+LwiGo2GMRfJE8FrPq0JjrF9YDFYuC7pOuDM5OhQYztlw95kR9JLWPOsXJl45bDX0mg1vuGF4ZhU3BvjosfxraxvsXHSRl6c8SIbJ23kwTEP+uZG9Ye7y+1rMOdvz03CuC/EzcnRK24Ur40p24TeGBiPbFyGLG7aa9QVlhq2uNmwYQNFRUVnTQYHeVq4aOInGCw7v7eTkldK0Bl1XPfP67jzoztJzEmktayVV5e9iuRV/7whtaDkzGRdnkVUXFSP5yVJYlfbLkAWN93JvEi+EJ3+9HSArRwYJbH4fev7VDuqQ2wNPq+NNc+K1+gd8QR1JReq+VhkiJuR0HKiBckrz5SKTYv169rCc3Mm3yZQISk447nprFXXzeiwxU1TUxPJyT3diOXl5WExAE8QeuqK6yj+QzEAN75wI9O/PJ2sS7O49fVbMcQbqPqgimNbj4XYyvBBqZIat3Bcr89XOCqodlajk3TMi5931nNq8dwA5MXkcZHpIrx4VdHUT8m3aZ7azPTY6WREZYxoPcVDEa4VU/6ke76Nv/OrhOcmsJVSCornJmLCUitWrGDFihXYbGe+ODabjVWrVrFy5Uq/GCeIXCRJYsd/7wAJpt4xlYk3n5l/lDI9hXnfky++ux7dNeQ+FdWOap6te5ZfV/2aJ04/wU7rTlxedZUW+xvJK1H5/hfi5qrexc1HrR8BML5rPDHas/uJpM9NB418IVDDj9SX078MwEuNL9HiCq0IUMrAm6c2jygkpeDz3ERIWGokBCqZGOSEYpC/06P1htvXnXiiOWB7RFxY6sknn8RkMmE2m2lpaWHevHlYLBby8vJ4/PHH/WmjIAI5teMUpz85jT5WzxW/uKLH83O+Mwej2UjT4SaObx3c3KMWVwuPlD3CTYdu4tfVv+bZ+mf5S+1feLjsYW46dBMvNb4UsT9yDQca6GruwhBnkIVKLyjVLJO6eva/MSYYfX1Gaj4LvffmItNFzIidgUNy8K/6f4XMDkmSqC2Smxu2TGsZdgl4d5Scm5YS4bnxlYH7OZkYznhuXO0uHK0Ov68fDgTFc/NFWMreaMfj8gRsn6EyomqpLVu2UFJSwpYtW3jkkUcoKSnhiSee8Jdtggim8NdyXtbMe2f2OszNmGgk/6F8AA789cCA6x3vPM7tR25nu3U7APNN87kn7R5uTr6ZFEMK9a56Hjv1GA+XPUy7R113GP5ACUllXZ6FzqDr8Xy7p5097XuA3sUN4BNFdXvqAmTl4NFoNNybcS8Amxs2h2wkQ/vpduz1drw6L4kzE8mJ7r0KbSgoF5qOmg6cHc4RrxfOBNJzY4g1EJMieyhHa2gqGOImNiUWjU4DklydpRZG3GQhNzeXZcuWsWzZMnJyRv6HL4h8Gg83yiXLGpjz7Tl9Hjfjv76Ye7TjVL9dRk90nuAbJ75Bk7uJnOgc/jX1Xzwx6Qm+M/Y7/Hj8j3ltxmt8J+s7GDQGPmj9gAdOPECLO7Lumit39h+S+sz2GW7JzbiocSS5e7+QpF2QBkDDPnXMPVqQuIBJMZPo9HbyfP3zIbFBybex5di4dsy1fskLibZEE22JBkb37CPJK/mSqgMhbqDbGIZR2MjP0erA3mgHAituNFoNcenqy7sZdG3Y2rVrh7TwunXrhmyMYHSwb+M+ACbePLHfPzpzrpmsS7Oo/riaI88d8eXhdMfqtvLt0m/T6mllRuwM/jTpT5h0Z3uCorRR3JN+D3Pi5/Dfpf/N4c7DPFTyEJsmbSJG559ZNqFEkiRfInBfzco+bP0QgEsTLu1zn
dRZ8lDHUPe6UVC8N4+UP8K/G/7N3el3E6+LD6oN5YVy36CWKS1cY7nGb+sm5iXSVdiFtdRK6sxUv60bTrRVteHudKM1aH1TvP1NfFY8dUV1tFWPPnGjeG1i02KJMvWsnvQncRlxtJ9up6MmDMVNUVHRoBdVS1dRgfrwuDwcff4oAOevPH/A46ffM53qj6s5/K/DPcSNJEn8uOLH1LnqyDZm86eJPYXNWWvFTecvk/7Cvcfv5XDnYX5Q8QN+lfsrtJrw7BKrYDslJwFr9VrS8tN6PO+VvHxkk5OJLzNdRh29h53SZsmvtVXY6LJ2EW2ODpzRg2SheSETjBOocFTwfP3z3Jd5X1D3P1osf1djZsYwxjjGb+taJlqoK6zzXYBGI0q+jWWSJWCdmpWQd3tV5IWiB0LJ6QpkMrGCGhv5DVrcbNu2LZB2CEYJFe9UYG+wE5se65tp1B+Tl0+m4MECGvY1YKu0kZB9psvmq02v8rHtY4waI7/I+QUmfd/CRiEnJoff5v2Wb5z4Bh+0fsBfa//K/Zn3D/g6NaMkAKfOSsUQ07Mb7KHOQ7S4W4jTxjErbhbb6P1vOdoSTcL4BGwnbTTsbyB7Qe+NAIOJTqPj/sz7+WHFD/lH3T9YlroMiz5wPTu6I0kS1v1WYohh9rzZfl1b8Vgq1SyjkUDm2ygoQx1Ho+emtVQOeQYyJKWgxnLwEcnliooK1q5dy+23387tt9/Or371K9raRt+XSDB4Dv/zMADT7po2qLu1mOQYMi+UG8xVvF3he7zZ1cxvq38LwINjHmRSbO9Jsr1xfvz5rB0nh1mfrHmS3W27B/1aNaKIG+V9OhclJHVJwiXoNf3fzyihqYa96si7AVhqWcqUmCl0eDv4W+3fgrZvYV0hMZVy2PLqS64e4OihoQwxHM2em2CIm9HsuQlGMrGCT9yoKCw1bHHz4osvkpuby5YtW7BYLFgsFp544gnMZjP79u3zp42CCMHZ4aT0tVIApn95+qBfl3OdnKiuzE0C+PPpP2Pz2JgSM4U70u4Ysi03Jt/ITck3ISHx05M/pdOjniz/oaLk2yhdhs9F6W9zWeJlA66lJBWrJe8GQKvR8lDWQ4BcORWsrsVv7noTAClNIiUjxa9r+zw3QtwIz02AGO1hqWGLmzVr1rB69WpKSkrYuHEjGzdupKSkhPvuu4/77gtuXFwQHlS8U4Hb7iYxN5G02T1zQ/pCGQJZsb0Cj9NDRVcFrza9CsD3s78/oDeiL7439ntkRGVQ7azmT6f/NKw1Qo3H5fGNB+jNc1PvrOeY/RgaNP0mEyv4PDf71eO5Abgo4SIuNF2IW3KzsWZjwPdrdjVzrFDujp1yvn+FDZxph287aVNVb5BgouTcBMVzUz36PDchCUtFguemubmZH/zgBz0eX79+/ZCSjwWjhxMvnQBg0pcmDSnpPH12OrHpsbjaXVR/Us2fT/8ZDx4WJC5gdvzwcyHidHH8cNwPAXih4QX2tu8d9lqhomF/A+4uN9GWaCyTeuaiKInEM+NmYjEMnKuScp58IW860qS6uV7/nfXfALzV/BZHOo8EdK+tjVuJPyHf9efOzvX7+nGZcehj9EgeaVT2YOlq6aKzTvaWJk/1fwM/BcVz47Q5cbaNnp5CLruLtirZWyU8N0Pktttuo7y8vMfjFRUVLF++fERGCSIPj9ND2etlAEy6dfD5MSD3URi/aDwA+wv2s8O6Aw0avjnmmyO265KES7gx6UYkJP7v5P/h8IZXJ1MlJJVxYUavglHJt7k88fJBrWfONaOL0uHudNN6Ul09WKbFTuNay7VISKw7tQ6PFBiPh8PrYEvDFswnzMAZb5Y/0Wg0JObKeTejsdeN0t8mPis+oGXKUfFRRCXI64+m0JQyaT0qIYqY5MC3u+ieUKyWLvAj6nOzcOHCHnOkNm3axG233TZyywQRReX7lThaHcRlxPmGNA6FrMuzOPLcEQ6+dxC+BFeZr2JizMSBXzgIHh77MJ/YPuGk4yTPNj5LBiMbjBhM+ksm7vJ28ZntMwAuTxicuNHqtVimWGg80EjT4SbMOWa/2eoPvj3223xo+5BDnYfY2rCV29Nu9/serzW9RrOrmaQSOVwSCHEDcoO5pkNNo9JzE4x8GwXTWBNNh5tor2oPqJdITXQPSQWjNYsibtx2N06bE2OiMeB7DsSgPTdFRUVn/VdWVsacOXN6PD5nTt8dZwWjl7I3ZK9N7g25aLRD/2Mbu2Cs/H+KQOvS8tX0r/rNtgR9At8d+10A/lH/D2y68LnY9CduCtsKcUgO0g3pQxKCyoyppsNN/jHSj6QaUvnvMXJ46k+n/0S907+Jz06vk7/V/o24mjj07Xq0Bq1v0KW/SZzwheemYhR6boIobkZjUnEwk4lBHnWheMjUEpoSfW4EQaHszS/EzfXDy19InpYMFtC36Lnk1CXMuHCGP81jqWUpmxs2s7djLwWJBdzB0Cuwgo292U7LcflHLHN+T3GjhKQuS7xsSHdvydPlu9umQ+oTNwBfSvkSrze/zoGOA/yy6pdsyN3gt7VfaXqFOlcd08vkar7k6cm9zuryB8pogNHouQnkwMxzGY3l4MEsA1eIy4jDaXPSXtMesBuCoRDerVkFYUHLiRasJVa0Bq0vd2aouCQXdRfInXUvOnyRP80D5ByI72V/Dw0aDsYdZF+H+tsZ1H4uT6u2TLL0iKtLkjTkfBsFn7hRoecG5NLwH477ITp07LDu4O3mt/2ybpu7jU01mwBYULMACFxICiBhwhfipmL0iRvhuQksirhRqvKCgZJU3FmrjrYaw6uh/YK9e/dSUFDQ43Gz2SzKwQU+lP40Yy8fO+zkwQJrAdUXVJP+XjpRhYFJQJwWO40bLTfyasur/KbmN/wz8Z+qHs3QX0iqxF5CnasOo8bIPFPPmVz9kTzjjLiRJEmV41QmxUzi6xlf56nap/j5qZ8zM24mWcbe52oNlk21m2hxtzDBOIHUklRaaSX1/MCJm9EalvI4Pb6Lb7BybmB0lYMrna+VZpHBQMm7aa9Rx/s8oiZ++fn5bNy4kTVr1vD888/z/PPPs3r1arZs2eJPGwVhTtlbckhK6VczHF5seJHG8xsBqPu8LmAZ+d/I+AZGr5Gj9qO82fxmQPbwF4q4ybiwZwL0ztadAMwzzSNaO7QZUeY8M1qDFleHC9sp9XoV7su8j1lxs+jwdvD9su9j99qHvdbhjsO8UP8CAN/L/h5N+2WvVSDFjRKWaj/djsc5enrdWEutSB4JQ7yB+DGBH4SqeG5GS1jK6/b6vIHB9NzEZ8rvs1pyboYtbh555BEKCgooKSlh9uzZFBYWUlhYyObNm8nN9X9fiO5s2LCBDRs2sGrVKlatWhXQvQQjw+PyUP2h3FF2wtIJw1rjhP0Eezv20ja5DY1eQ2d9J22VgXExJ+mTuMwmd/L98+k/j+iCGUgkSfKJm96qz961vgvIVWVDRWfQ+RIRW461DN/IAKPX6Hks5zHMejPH7Md4tOJRvJJ3yOvYvXZ+VPEjPHhYYl7CXP1cWk7I5x3IsFRsWiz6aD1I
BOz7rEa6N+8LhldQ8dyMlrCU7ZQNr9uLzqgLinhUiM2IBSJA3JSWlrJw4UIAcnNzee+99wBYvnw5mzdv9o91vaB0Rl69ejVPPvkkAEuWLAnYfoKRUbu7FleHi5jkGF+DuKHyYsOLAFyecTlp58udjWs+r/GbjedyYduFZBgyqHPV8e/6fwdsn5FgLbHS1dyFzqjr4V2odFRyzH4MHTquMF8xrPWTJsvhAuUir1YyozL5Rc4v0KFju3U7v6j6xZC8epIk8djJxzjpOEmqIZW149bSeLARJIhNjyUuLS5gtms0mlGZVKzk2wQjmRjOeG466zpHhYesezLxcCpTh4vPc6OSLsXDFjf5+fns3bsXgMWLF/P4448D8NRTT2G1Wv1hWw+sVivFxcVnrb9q1SoKCgooKysLyJ6CkXHq3VMAjL1y7LD+0Do9nb7w0LKUZWTMk0MwSjJtINCj54GMBwB4uvZpmlzqS6xVvDbp+enoos6u5nm3RfbazDHNGfYEbctk+XVKNZaayTfl8+iER9GgYXPDZjac3oCXgT04kiTxp9N/4q2Wt9Ch4/8m/B+J+kTf6IlAhqQUlKTi0ZR340smDlJFTUxKjO9vRC35IIFEybcJZqUUqG8y+LDFzdq1ayksLARg5cqVNDY2otPpWLVqFatXr/abgedSWFh4lpBRQmCBElSDxWV3UfF2BV0lXSG1Q21UvlcJwLiF44b1+vet79Ph7SDbmM080zwy5n8hbnYHTtwALElcwvTY6XR6O30VNGri9Kengd6HZe6w7gBgoXnhsNdXRjk0H28e9hrB5Nqka/nBuB+gQcPLzS/z75R/9ytKXZKLDVUbeLruaQB+MO4HzDfNB87M1QpkSEphNCYV+8JS04IjbjQazajKu/F5boLU40ZBbfOlhl0ttWzZsrP+XVRURHl5OUlJSSQmBiZD22w209Jy9p2kUq3VV56Pw+HA4TjTUt9mk92/LpcLl8vlN9s++vFHFP2yCNOVJlwP+G9dNaG8X4N939xdbqo/kfNtxlw2Zljv95tNstdmaeJSPG4PKbPl0FZtYS2OLgdanX+rmRQbPW4P30r/Fg+WP8jLjS+zzLKMnOjhJ0T7G0XcpM1JO+t9rXXWcqjzEBo0XB53eY/3fLCfoSlXzlNoOdHi17+TQHJj4o3Ej4vnfyv/l9KYUu48fif3pN3DTZabSNTLv0leycvn7Z/zx9o/UtJVggYN38n8DtcnXu87T2UietL0pICfe9xY+YLQWt466L2G+neoJiRJ8nluEvIS+jwHf59jXFYcreWtWE9aSZs/+KG9gSKQn6FyQ2KaYArqdyQqRa5itTfa6ers8nlP/WnDUNbSSGoZBDFM5syZw6pVq3qMgVD4yU9+wqOPPtrj8eeee47Y2Fi/2dF5sJPqH1WjS9CR80xOUGOdaqXzQCfV/68anUVHzt9yhpw82KHt4Ndjfo2kkXiw5kFS3ClIHonSu0uRuiTG/2E8UdmBm0sD8ELyCxyLPcZk+2TuaFRHYz+vw0vpXaXggQlPTsCQbvA992n8p2yzbGNc1zi+2vDVYe/hbnZT/vVy0MLEFyaiMYTP97leX88rya9QGyV79zSShhR3ClHeKJr1zdh1cpJ4jCeGG1puYJp9mu+1kiRR9uUyvB1exv1mHMacwLaRb/ugjdpf1xIzI4axj40N6F5qwN3kpvze4H+van5VQ/uH7aR8LQXLzcGrIAoFJx86ifOUkzH/O4a42YHLGTsXyStRsqIEPJDz1xz0ySPqNNMrnZ2d3HXXXbS2tpKQkNDvsYPe/YEHHmDFihW+JOLeZk11Z926dYNdetisWbOG22+/vU9hA7KdDz/8sO/fNpuN7Oxsli5dOuCbMxQ8Szxs2rAJp83JrORZjL048n6oXC4X27dvZ8mSJRgMhgGP37V7F9VUk7c0j2uvv3bI+73Y9CLSaYkp0VP4ytKv+B5/YdYL1H5WyzTzNKZcN2XI6/bHuec4wzGDu47fxfGY46QvSGdOfOjHi5zedZpSTykxaTHc9NWbzhKN/yn9D3TCspxlXDfvuh6vHexnKEkST/z3E7g6XFwy9RIsU8LnguByuUjZnoJ3vpctLVs40XWCBkOD73mT1sQ1lmv4etrXe+QktVW2UdJRglav5ab7b+qRz+RvTltOs+XXW9C367nuup6fV28M9e9QTZzacYpyyjHnmbn+5uv7PM7f5/jhzg8p/rCY7MRsFly3YMTrjZRAfYaSJPHnhj8DsOT2JUEPTf0l/S90nO7gwukXknR+kt/PUYm8DIZBi5vdu3efVZVUVFTU57HBKO/bunUreXl5/QobAKPRiNHY8+7LYDD49UtlMBgYt3gcJS+VULmtkpwF6glh+JvBvnenP5BDJxMWTRjWe11gk0OO1yRfc9br0y9Ip/azWloOtwTsx105x4mGiXwp5UtsadzCH+v+yD/M/wh5Y7+GIvlCPeaiMURFnfFcNbga2N+5H4DFyYv7fW8G8xlaJlmo31tPW3kbaeeF3pU/FLRouSHlBpZlLqPGWcPJrpN0ejrJNGaSG52LUdu7R6blsBz2TpqWRHTc0PoDDYfkiXLFUHtVOzqNDq1+8N8tf/+GBYPWEjm3KGV6yqBs99c5Jo6Tw5KdNZ2qes/8/Rm2n27HbXej0WlImpgUsNEhfRGXHkfH6Q4cTQ7fefnzHIeyzqDFjZI8rBDKWVNKno0ibKxWK83NzQHvrzMQOdflUPJSCRVvV7DgZ6G/Owglrk6XLy8k+6rsIb++xlnDnvY9aNBwteXqs55LOV/Ou6nf59+hiX2xMnMlbza/yZHOI7zd8jbXJQ3uDjtQ9NWZeFvzNiQkZsbNJCNq5JPNLZNlcaP2cvCByIzKJDOqZ+J1bwSzUgrkJExdlA6P00NbVZsvwThSCebYhe6MloRiZWBmwviEoAsbkNsngFx2H2qGfQv67rvv+tOOQVNcXExxcTH5+fmUlZVRVlbGpk2bSEoK/aCu8UvluUn1RfV01KsjYzxUVH9cjdflxTTWNKySxO0t2wGYHT+b9Kj0s55LmyV7ERr2NfR4XSBIMiTx1YyvAvIk6i5vaCviaj7tXdy83vw6gN/El1IxFQ7l4P4imJVSABqtBtM4OXl7NPS6aT4SGnEzWhr5hWJgZnfi0r+YLxXO4mblypW8/PLL/rRlQKxWK4sWLWLNmjXk5eX5/luzZg1mszmotvRGXEYcURPkMEHl+5Uhtia0KCXg2QuzhxWmVIYhXmO5psdzyl11e3U79qbgdBC+K+0u0g3p1Dpreb7++aDs2RsddR3yRVCDr+cPwPHO4xy3H0ev0bPUstQve/l63YS552YoKII5WJ4bGF3l4MFu4Kfg89xUtyN5w7qGpl98PW6CnGujoHhuOupCf3M/bHGjdAluawueElZKwSVJ6vGfWoidKX+4ysV9tHLqPbl537irht7fpryr3Ndhd5FlUY/no0xRJObKFwTlTjvQRGuj+eaYbwLwt9q/0eIKzQVfCUklT0/GmHAmb0Tx2ixIXIBZb/bLXuHUyM8fuOwu37kq3sFgMFq6FDt
aHbSflsNCwU5Qj8uMAw14XV46G0PvVQgUwnNzhmHXamk0GhITE5kwYQKLFy/uke8SjGopNRIzMwbra1ZfZ97RiLPN6WuyN5x8m3ea3wHg4oSL+7xQp56fSmtZKw37GoYloIbDtUnX8mz9sxyzH+Op2qdYnR24ZpV90VtIyi25fZ6uG5Ju8NteSliqraoNV6cLQ6x6EjEDQePBRiSvRExqjO8ONBgoXYqVYYeRSvMx2WsTlxFHtDnwydrd0Rl0crJrbQcdpzsCOlYjlCiem2AOzOxORHhutm/fTlJSEnPmzKGlpYWioiLff8XFxf60MayImR6DRquh5XhLxMd3+6Lqwyokj0RibiKJ44eWIClJEm+3yBfqq5Ou7vM4JSciWEnFAFqNlu+M/Q4AWxu2UtFVEbS9FXoblrnLtosmdxNmvZlLEi/x214xyTFEJ8kXIeVHM5Jp3C9PnU+blRaUik8FxXMT6WEpXzJxkDoTn4syRFLxHkUakiT5/k4T80KTmB4RnptADscMZ3TxOlJnp1JfVE/le5VM//L0UJsUdEYSkjrSeYRKRyVGjZErE6/s87hgJxUrzDfN57KEy/jI9hF/qP4Dv8r7VdD29nq8Po9Yd8/N601ySOoayzUYNP71rlgmWaj5rIbm481BzUMJBYpQDlYysYKScxPpYanu08BDQdyYOCiOXHHT1dyFo1Xuxm/ONYfEhoiolhL0zdgr5AZ+1R9Vh9iS0OBLJh5GSErx2lxhvoJYXd+hAeVC23SoCa974CGJ/uTbWd9Gi5b3W9+nuC14Xsrmo80425wY4gwkz5ATMlvdrXzQ+gEANyT7LySloOTdWE9Y/b622ghFMjGc8dy0nWrD6wnudzmY+JKJpwY3mVgh0j03Sr5N/Jj4kIWQFXFjb7LjcYV2AvuI+iPv3bvX13OmO2azmfvuu28kS4c1yjBDZa7SaKKrpYu64jpg6J4bj+RhW4vcP6m3KqnuJOYkYog34Gp30XysmZQZKcMzeBjkxuRya8qtvNj4Ir+t/i3PTHkmKI39lHybjHkZvpla/2n6D07JyeSYyUyNmer3PcNtgOZwkSQp6GXgCvFZ8Wh0GrxuLx01Hb6y5UhDLWGpjtOhzwcJBKGulAI5lK3RapC8EvaG4FSy9sWwf5FffPFF8vPz2bhxI2vWrOH555/n+eefZ/Xq1WzZssWfNoYdirhpPNjocxOOFqo+qAIJkqYk+X5MBktxezENrgZMOhMXJ1zc77EarcZ3hx3s0BTAqsxVxGpjOdR5iFeaXgnKnuc27/NIHrY2bAXgttTbApInMlo8N22VbTisDrR6bdDDJlqd1idobKciMzTlcXl8F99QhaVGi+cmVJVSIH+XY9PUEZoatrh55JFHKCgooKSkhNmzZ1NYWEhhYSGbN28OeafgUBOXEUdiTiJIUPN5TajNCSpKvs1IqqQWmhcSpR14IKYiboKZVKyQbEjmgTEPAPD76t/T5GoK+J7niptPbJ9Q7azGpDNxTVL/nq7hooibSPfcKAI5aVoSeqP/B/4NRMK4M6GpSMRaasXr9mKIM4TMMxU3Rk52ba+OTHGjdCcOpecG1JN3M2xxU1pa6huimZuby3vvvQfA8uXLRbIxMOYSuZrl9CenQ2xJcBluvo3L62KHdQfAoC/USvggWL1uzuW21NuYGjOVNk8bv676dUD3crY7aTwoV/Mo4uZfdf8C4Obkm4nRxgRkX6Wk1N5gj2gvpPIdCmZ/m+74uhRHqOeme2fiYFaidSfSPTetpXK1XSg9N9CtYqo+TMVNfn4+e/fuBWDx4sU8/vjjADz11FNYrVZ/2BbW+MTNrtEjbjobOn0XiewrhyZudtl2YfPYSDGkDHrydqgqphT0Gj0/HPdDtGh5u+VtPrV9GrC9agtrkbwSpmwT8WPiOdRxiML2QnTouDPtzoDtG2WK8rmZFbd3JKJ4/5S5ZcHG18gvUsVNiGZKdceXc1PXEfQihGAgPDdnM2xxs3btWnbv3g3IoxgaGxvR6XSsWrWK1auD39xMbYy5WBY3NZ/WRHS77+5U7pS9NinnpQy5SZZSJbXUshSdZnAD31LOky9EHTUd2JtDk7w2PW46t6XeBsDPT/2cDk9gkhXPDUn9ve7vgNxY0B9DMvtDuROMZHGjCORQeW4iPSzVdFQO2wZ77EJ3YlNj0eg0IKmjyZw/cbY7fWIi1J4bn7gJV8/NsmXLuP/++33/LioqoqSkhJaWllHbnbg7qTNTMcQZcLQ6fP0dIp3hhqQ6PZ3sbN0J0GMCeH9EmaJ87vymQ6F7jx8Y8wAZURlUO6v5VVVg+t5070x8vPO4L4T3lfSvBGS/7ih3gpHayM/V6fLNzwpVL59IH54ZqoGZ3dFoNcRnRmZoSrnxiEmOCXr353NJGJ9AYm4iUaaB8yYDybDFjVar5Y477jhreGZOTg6JiaHpjKg2tHotGfPlO+rRknejjJwYt3BoJeA7W3fS5e1irHEsM2JnDOm1Sgl446HGIb3On8Tr4vnp+J+iQcN/mv7De9b3/Lq+JElnPDcXZfKn038CZCGYF5Pn1716I9I9N40HG0GC2LRY4jJC05Zf8dxEYlhKkiRVhKXgTFJxpJWDK3+boepM3J38b+Vzf+n9XPTji0Jqx7DFTWFhIWazmXvvvRedTsftt9/Ou+++60/bwp6sS7KAnnk3HslDUVsR/67/NxtPb2Rzw2aOdB5R1QDQodJe0y7/gGkg+4qheW6UKqmrLVcPOdlQaWYXSs8NQL4p3+dF+enJn9Lg8l8eUFtVGx01HWh0Gmon1/KR7SN06FiVucpve/RHpIsbX/O+IPe36Y4ibhxWBw5bZCVut59ux9nmRKPThDwfJFKTikM9U0qNjCiheOPGjTQ3N7N7924mTJjAypUr0el0PPjgg/60MWzJvFjOj1A8N17Jy0uNL3HjwRtZeWIlv6z6JU/VPsX6yvV8+eiXufnQzbzU+BIuyRVKs4dF5ftySCrtgjSiLYN3i1rdVj6xfQLAtZZrh7yvGjw3Cg9kPsCUmCm0elr5UfmP/PY5KiGplPNT+GXTLwG4OeVmxkeP98v6AxHx4mZ/aDoTdyfKFOX7u2mrjKy8G8VrY841h6TMvjsRK25U5LlRC35pq5qfn8/69et58sknWbRoEU8++aQ/lg17lOGGzceaqamrYdWJVTx26jHqXHWYdCauSryKL6V8icsSLiNGG0O1s5rHTj3G1459jXJ7eYitHxrDzbd51/ouHjxMjplMTkzOkPdVi+cGwKA18LMJPyNWG0theyG/qvRP/o0Skuqc1ckJ+wkSdAk8OCZ4NxDK3XZbVRvuLnfQ9g0WavDcQOSWg4e6M3F3IlbcCM9ND0Ysbl566SVuv/12dDodt912G3PmzKGwsNAftoU9MckxJE2R/6DXvriW4vZiYrWxfHfsd3ln5jv8Mu+X/HDcD/ndxN+x/fztfHfsd0nQJXCk8whfPvZl3re+H9oTGALDzbd5u/mLCeBDSCTujlJ90VnfSWdj6Ie15cbk8rMJP0ODhi2NW9jcMPKeT4q4+SBHniH14J
gHseiD9yMWkxIjJwdK0FoeWZOru49dCFWllEKkVkyFemBmd3yN/CJM3PjKwENcKaUmhi1ubrvtNnQ6Hffddx8Wi4XCwkKamppYt24ds2fP9qeNYU3SfPkPWtorkRWVxT+m/IO70u7CqDWedVyMNoa70u7ihWkvMM80jy5vF98r+x4vNrwYCrOHhK3ShrXUikanYezlYwf9uhpnDUXtRcDgG/edS1R8FAkT5IuCGrw3IA/9/NaYbwHwy8pf+sJuw8Hj8lBbKE8Cr5lRw5z4OSxLWeYXOweLRqOJ2NCU7ZQNR6sDrSH4YxfOJVIrptSSTAyROV/KZXf5BLHSUVwwAnGTlJTEtm3baG5uZuPGjULQ9IJLcvFhzocAjDk8hr9M/suAoZe0qDT+MPEPLEtZhoTEzyt/7pe7/0CihKTS56RjTDAOcPQZFK/NnPg5I+rV4su7ORj6vBuF/0r/L65Puh4PHr5X+j0+b/t8WOs07G/A0+XBaXLizfHy6IRHgzKk81witRxcCUklT0tGFzW4/kqBIlIrppQy8FBNA+9OJIallBsOo9lITEpgOpWHI8P+ldy4cSOLFi3ypy0Rx5+q/8T+SfsByDySSaphcDF9g8bA2uy13JN2DwAbKjfwbot6K9GGE5KSJIk3m98E4Lqk60a0v5J3o4akYgWNRsP/G/f/uCzhMhySg++UfGdYn+ErBa8A0DSjiZ/l/ozMqEw/Wzo4ItVz48u3CWEysUIkhqW6rF0+IZE8XT3ixt5ox+2IjPyxluNySMoy2RKy0RZqJPi3gKOE3W27+Wf9P7FOsqKJ0uBsdtJaNvh8BY1Gw7ezvs3ylOVISPyo4kcc6DgQQIuHhyRJvmGZ464avLg5Zj9GWVcZUZooFplHJpIVz41awlIKBq2BDbkbWJC4AIfkYHX5ap48/SQeyTPga72SlydPP8mBD+XPPO/iPBaYFwTa5D6JVHGjjF0IdTIxROYIBuVv0jTWhDFx8F7dQBGdFO3z0HXURkZoShE3SZNDH/ZTE0LcBIAubxc/O/UzAG7NvJWM2XLIZagTwjUaDd/P/v6Zu//S71DpqPS7vSOhtbyVtlNtaA1axlw6ZtCvU7w2CxIXYNKPbEpw94optfUKMmqN/CL3F6xIWYGExKbaTXzt2Nf6Fao1jhq+U/odNtVuIvmAfG7XLAnM1O/BEqnipnG/7O1Tg7hRcm7aqtrweiJj9pHiTVWD1wbk39RIa+TX3XMjOIMQNwHg6fqnqXJUkW5I56Gsh3ydims/rx3yWnqNnnU565gSMwWr28p3S7+L3ROaOUq9cWqH7LXJvDCTqLjBtdv2SB5fvs1IQ1LwRcWUBuxN9pDPM+kNvUbPI+Me4acTfkqcNo5DnYf46rGvcu+xe3m27ll2t+3mQMcB3ml+hx9X/JhbD9/Kx7aPibfGk3BKvptX2gqECiXnprW8NWIuvM42p6/KRA1hqbiMOLR6LZJHoqMmMi68iudGuQFRA5GWdyPETe+EtqNSBNKia+Hfjf8G4PvZ3ydeF0/m/Ez2sGfInhuFWF0sv8v7HXcfvZvSrlLWVa7j0fGPqiK+enLHSQDGLRp8SGp3226a3E0k6hK5JOGSEdtgiDWQmJNIa1krTYeaiEsPTQv9gbgu6Trmmebxp+o/8Wbzm+zt2Mvejr29HjvPNI+7y+/mMz4jaWrSkBojBoL4rHh0UTo8Tg9tlW0kTgj/ZmH1++pBks9NDd8ZrU5L/Nh4bBU2bCdtmMaOzKOpBhTPjRI6VgORJm6aj8sJ25ZJQtx0R3hu/MwO8w6ckpP5pvlcmXglgM9zU19cj8c1cL5Fb6RGpbIuZx1atLzR/AYvN7088IsCjOSVfJ6b8YsH3y331aZXAVhiWYJBa/CLLWrqVNwfqYZUfjLhJ7x+3ut8O+vbXJpwKROME8iIyuD8uPO5M/VO/jb5b2yctBHkKnnGXBJarw3IF97EHFnQREpoqr5YzrdJz08PsSVniLSKKeG5CSxd1i7sDbInX4ibsxGeGz9y1H6Uw7GH0aLl4ayHfZ4Vy0QLRrMRh9VB48FG0mcP78d0jmkO3xzzTf5w+g9sqNzAtNhpTIud5s9TGBINBxqwN9oxxBnInD+4Kh6r28q7Vrlq6JaUW/xmS/KMZEpfK1VdUnFfpEWl8ZX0r/Q71VuZSTbm4tCLG5BDU83HmrGWWhm/KDijHwJJXXEdAGmzQ9u8rzuRlFRsb7b7knbVknMDkdXIT5lmH5cZF/Ip3GpDeG78yJO18tiJq81XMyl2ku9xjVZDxrzh59105yvpX+GKxCtwSS7Wlq+lwxO62LzitRm7YOyge4S82fwmLsnFlJgpfhVmKeeFh+dmsHjdXl8YUzXiRkkqjpBeN4q4UaPnJhLKwZsOf1EplW0aUv+rQOPz3FRHgLgR+TZ9IsSNn9jbvpdd7bvQSBruTbu3x/OKZ2O4eTcKWo2W/x3/v6Qb0ql0VPJ45eMjWm8knCwYWr6NJEm83CiH025NudWvtnQvB1dbxdRwaDjQgLvTjTHR6BsxEWoiqWLK3eX2XXzT8tXjuYmk+VJqDElBZIWlRBl43whx4yeUFvuzO2Yz1thzBMFIKqbOJVGfyGM5j6FFy5vNb/J60+sjXnOoeJweqj6oAgafb7O/Yz9lXWVEa6OHPW6hL5KmJqHRauhq6YqI/hVKSCrzwkw02tAnjsOZicORIG4aDjQgeSRiUmJUlbjry7mJgBEMakwmhsgawSA8N30jxI2feHDMgzyZ+yQLbL03WlPCUo2HGnG2OUe83+z42azMXAnA45WPc7Lr5IjXHAo1n9fg6nARkxJD6szBldEqXpsl5iWYdP69oOij9T7PQrjk3fRHzS7Zw5d5cWg6EveGMnHYWmINe+9Y92RiNVQdKvh63URCWErlnhtHqwNnx8h/i0OJEDd9I8SNH5kVN4sET0Kvz8VnxmPKNoF0JtY/Ur6e8XXmxM/B7rWztnwtTm/w/lB9IamF4wblWWjztLGtZRvg/5CUghrHMAwXxXOTdUlWiC05Q8KEBNCAq8Olyn5CQ8GXTKyikBSc8dw4Wh04Wh0htmZkqNVzE5UQhT5WrqUJ535CkiSJMvB+EOImiCihqZHm3SjoNDp+NuFnJOoSOWY/xh9O/8Ev6w6GoZaAv9b0Gg7JQW50LufHnR8Qm9Q6hmGodNR3yKEfjRyWUgt6o14W6IR/aKp+j+y5UVOlFMhT7qOT5J5GtsrwDU3Zm+x01skCWE2VUiB3KTZlyd/jcM676ajtwNXuQqPVkJgb/n2n/I0QN0FESSr2R96NQlpUGj+Z8BMAnqt/jg9bP/Tb2n3hbHNS86ks0AaTTOyW3DxX/xwAd6bdGbAwQKR4bpSQVPL0ZFXM4+mOLzQVxuLG4/LQsF8emKmmSimFSKiYUv4GE8YnEBWvvhLlSBjBoJSBJ0xIQG8UXV3ORYibIOJvz43CgsQF3Jl6JwA/OfkTGpwNfl3/XCrfq8Tr9pKYm4g51zzg8TtadlDjrMGit/hl3EJfRErFVPXH1YC6QlIKkVAO3nykGY/DQ1RC1KC+v8EmE
iqm1JpvoxAJFVMi36Z/hLgJIhlzMkAj35H5u6LnoayHfPOnflTxo0FNnh4uFe9UAJBzbc6Ax0qSxD/q/gHAbam3Ea0N3BgByxQLGp0GR6sjrH+0lCq0rMvVJ24ioWKqe/M+tVSidScSKqbUmm+jEEniRpSB944QN0EkyhTliz/X7vZfaAogShvFupx1xGhjKGwv5JnaZ/y6voIkSZzcJicTD0bcFLUXcdR+FKPGyIrUFQGxSUFv1PvCJuGad+PscFJXJF98xy7o2VIg1ERCrxs1Nu/rTiRUTAnPTeBpPiYnE5snmUNriEoR4ibI+KuZX2+Mjx7Pmuw1ADxZ8yT72vf5fQ9XlYu2k23ojDqyr8we8HjFa3NT8k1Y9IF3n4Z73k3NpzV43V5M2SYSx6svSTAScm6UMnC1JRMrhPt8KUmSaNgnh8bVMG29NyJC3ByRxY1amnyqDSFugow/m/n1xg1JN3Ct5Vo8ePhBxQ+wuf37A9mxRw6njV0wlqi4/hMFj3ce52Pbx2jQcHfa3X61oy8UcROunhslJKVGrw2c8dzYG+w4bOFXqux1e32eG6X3lNoI9/lS7dXtdLV0odFpVHvhDfeEYrfDjbXMCghx0xdC3ASZ7p6bQCS9ajQa1o5bS7Yxm1pnLT879TO/7tNZLJd3DiYk9UTNEwAstSwlO3pgL48/CJfp4H1R9eEX4uZydYqbKFMUMakxQHh6bxoPNeK2u4lKiFJtroLiuWmvbsfr9obYmqFTv0/2jCVNTUIfrc4qnu6em3AsPrCWWJE8ElEJUcRlxoXaHFUixE2QSZmZgs6ow2F1BKziJE4Xx7qcdeg1enZYd/Dvhn/7ZV1Xpwv7ITsAOdf0L24OdBzgg9YP0KFjVeYqv+w/GHyem8PhVzHlcXp8ZeBqTCZWCOfQlOIxzZibocpkYoC4jDi0Bi2SR6K9JvzCJo375RsLtYakAJ8gcHW4/NIxPtg0HZE900lTk1TVYVtNCHETZHQGnS+RMRB5NwrTYqfx7axvA/Cbqt/wqe3TEa9ZtbMKySVhGm8iaWrfd72SJPGbqt8AcEPyDYyPHlyjP3+QNDkJrV6L0+akrSq8EjLriupwd7mJSYlRtas5nMvBlUR+JTysRjRajW/eVThWTCk9hFJnqVfcRMVF+XpIhWPejci3GRghbkJAoPNuFO5MvZMbk27Ei5dHyh8Z8fypk+/Irx+/dHy/dwvvtLzDvo59RGuj+UbmN0a051DRRel81QPhlndT+UElAFmXZan6biycy8GVGwolPKxWwrliSglLqdlzA+Gdd+Pz3ExTZ2hVDQhxEwICWTHVHY1Gww/G/YBZcbNo87TxP6X/Q6u7dVhrSZJExdsVAEy4ekKfx7V52vhd9e8A+Hr610mLCn5FSrjm3VR/KDfvU2sysUK4loM7O5w0HpS/E2pNJlYI16Rid5eblmNy/xW1i5twrpgSnpuBEeImBCiem/o99XicgWu2B3L/m1/k/oJ0QzonHSd5qOQhOj1DH3rYeKiR1rJWNAYN2Qv7Tg7+ffXvqXfVM9Y4li+nf3kkpg+bcKyY8nq8VH/0hbhRaTKxQrjm3NTvqUfySMRlxhGfFR9qc/olXMvBmw43IXklYpJjfOJBrYSruJG8kq/HjRA3fSPETQgw55mJtkTjcXhoOBDYUQkAyYZk/jDxDyTqEjnYeZDvln0Xh3doZbyl/ykFIHZWbJ+zYnbZdvFS40sA/HjcjzFqQzMXKRw9N40HGnG0OjDEG0i7QJ39VxSUsFRbZRtuhzvE1gweJd8mc36mqsN+EL7zpZSQVMr5Kap/j33ipjq8xI3tlA233Y0uSkdijvp6YakFIW5CgEajCVrejUJeTB6/n/h7YrWxfN72OWvL1+L0Dr5K4MQrJwCIu7D3ssMGZwP/r+L/AfKYhTmmOSM3epiEY8XUqXflKetZl2Wh1av7zzI2NRZDvAEkaC0fXpgzFPgqpVScTKwQrvOlwqFSSiFcPTdKvo15kln1vxWhJCzfmQ0bNrBp0yY2bdrEhg0bQm3OsAjUEM3+OC/uPH6V9yuiNFHsbN3Jt0q+RZtn4DvDtqo26grrQANx83qKmy5vF6vLV9PibmFKzBS+k/WdAFg/eCyTLGgNWlztrrC5OJzc8UWy9qLgVZYNF41GE5ahKeVvTe35NhC+86XCoVJKQUkoDjdxI/JtBkfYiRtFzKxcuZKVK1eSn5/PqlXB66PiL5Sk4mB5bhTmm+bz27zfEquNpai9iPuO3Ue9s77f15S8WgJA5sWZ6M1nN+VyS25+WP5D9nfsx6Qz8XjO4yELRynoDDrfpNxwyLvxOD1U7ZSb941frH5xA90qpsKkHLyzsZPWMtnLlDFX/eJG8dw4bU4creHRCTocxi50J9w9N6JSqn/CTtysW7eOlStX+v69ePFiNm3aFEKLhody99h0pCnobewvTLiQpyY/RbI+mZKuEu45dg9FbUV9Hq/k2+TdmHfW4w6vg9Vlq3m/9X2iNFH8Ju83jIseF1DbB4sv7+ag+vNuaj6vwdXhIiYlJiwuChB+FVN1hfLIBctkC9GWwE2m9xdRcVHEJMudoMPF+9hR04G9yY5Gq1HtNPDuKOKm43RH2ISvQXhuBktYiZuysjKsVitms7nHcwUFBcE3aATEpcfJ5Z4SvinQwWRq7FSenvI0udG5NLoa+caJb/CLyl/Q7jn7LqbL2uXLB8m9Kdf3eKWjknuP38vO1p1EaaJYn7ue2fGzg3oO/ZFyXvgkFZ/aIb+/4xaOU23X3HMJN3ETLv1tuhNueTdKSMoyxaLasQvdUboUe5weupq7QmzN4Gk+KsTNYFD/N7AbZWVlvT5uNpuxWq29PudwOHA4znhGbDb5h8LlcuFyufxqn7LeYNdNn5uO7aSN6l3VZF4W/B/dNG0af837K78+/Wtea3mN5xue563mt7gz5U5utNxIsiGZktdK8Lq9JE1LIn5CPO2l7Txx+gmeb34eh+QgQZfAunHrmBM7x+/v50gwTzEDsudmKHYN9TP0BxUFFQBkXZkV8H39dX6mCfKF11piVdXnDr2fY/Uncpl96pxU1dnbF/Fj46nfU4+1/Oz3OBTf0cFQWyyH2FPOSxmxbUE5Ry1EJ0fT1dSF9aQVfULwLofDPb/Ohk7sTXbQQHxOvOq+A90JxGc4lLXCStz0RVJSEs3Nzb0+t27dOh599NEej2/bto3Y2NiA2LN9+/ZBHddikptd7X1tL/Uz+s97CSSzmU2CMYG3LW/TRBMb6zbyZO2TjHGOYfbfZmPCRGl+KXfvvZuqMVV4m+RhfuO7xnNL8y3UVdTxJm+GzP7ecDbKlWANhxp44/U3huwRGexnOFK8di+nd50GoFRTSuWblUHZd6Tn56qXf2Raylp447U30OjU53FSzlHySlR+KL+vZd4yTr95OpRmDZomr5xbsff9vVRlV/V4Pljf0cFS85bsHWsyNvHmm/75PQj0OXrjvdAE7736HnGVwR9AOdTzU2b76VP1bH9fXZ9/X/jzM+zsHHyPtogQN30JG4C1a9fy8MMP+/5ts9nIzs5m
6dKlJCQk+NUOl8vF9u3bWbJkCQaDYcDjq03VbH1mK5oqDdddd51fbRkOD0gPsM26jZeaXuKg/SB1njpidslx/8+u/YyWaFmMTYuexlfSvsKVCVeqtpeF1+3lz//zZzwOD5dNv4zE3MH1gxjqZzhSKt6uoNRTSsKEBG7++s0Bfz/9dX5ej5c/ffNPeF1eLj//cl9HXTVw7jk2HmikpLMEQ5yBWx64JWzKZ4uOFvHRGx+Rqk/l2uuu9T0e7O/oYHnme88AcPmdlzN+ycgS44N1jq/8+RVOnjzJ9LHTmXHdjIDtcy7DPb8D1Qeoooqs/CxVXDP6IxCfoRJ5GQxhJW5yc3N7fdxqtfb5nNFoxGjsWb1jMBgC9kcz2LXHzB+DRquhvaodR4Mj5B09DRi4Oe1mbk67mRpHDe898x6nHaeRciRuvOJGxhnG0VLYwj1L71HVj2qvGOSJuQ37G2g93krKlKElOAby+9Gd6p1yuGT8ovFERfXeHDEQjPj8DGDONdN8rJmOUx0kT1Rf/F85x7rP5Zy2zIsyMcaEtpJvKFhy5Yq/jqqOXj+rYH1HB4Oj1UFriVyNlnVhlt/sCvQ5mrLk8GpXXVdI3suhnp/1mBWAlOkpqvnsB8Kfn+FQ1gmPW5gvyM3NxWw295p7s3jx4hBYNDKi4qN8DeeU7qlqIdOYSfTrclXJJfdcwrfHfpsbkm4g2a2+i1hfKO+tmpOKTxbI/W3GLVJHldlQCJcBmqc/lsNQWZdmhdiSoRFOIxjq9sgCMmF8gq/KKxwIt3JwpaN96szwqKoMJWElbkAOM3WvjNq6detZpeHhRrCGaA4Ve7OdincqAJh6x9TQGjNMlHJUtfa66ajv8PUFGbcw/MSNUjHVUtISWkMGoPpj2Ts25pIxIbZkaCjVUu3V7Xjd3hBb0z9KxWf6nPQQWzI0wqmRnyRJNB6Qb9RSZqq/1D7UhJ24Wb16NVarla1bt7J161Z2797Nk08+GWqzho2vU/Fn6hI3J148gdflJXVWatiWHKrdc6NMWU/PTycuPfjJjCMlHMrB22va5RERGhhzUXiJm7j0OLQGLZJXUv3F1ydu8sNL3Ciem7Yq9c/w6qzrxN4oV0qF629yMAmrnBuF1atX+/7/8uXLQ2jJyBlzsfyDW/NpDR6XB51BF2KLZI78+wgAU+8MT68NnPHcNB9pxuvxotWpS8uXvSGHV3OuywmxJcPDPNEMQGupeudLKV6b1JmpGBPDJ98GQKPVYMo20VrWiu2UzRemUiP1xXK1Z7h5bkzZX3jHqtQtHuFMQ1LLRAuG2PDItwkl6vq1H4WkzEgh2hKNq8NF/Z7QlYN3p/10O5Xvy6Wz4RqSAkjMTUQfrcfd5VbdgEev2+sL++Ve33syvNrpHpZSa4dXJd9mzKXh5bVRCIcZU842J83H5YrVtHx1T7Q/F0XcdNR14HF6QmxN/yj5NiIkNTiEuAkxGq2GrMvlRMfKncHpcTIQxzYfA0nOUUgcP7gSajWi1WlJmirPX1Fb3k31J9U4Wh3EpMSExSDH3kjMSQQNuNpddNYPvv9EMFE8N+GWTKyglNi3nVJv2KRuTx1IYBprIi4tvMKrsamx6KJ0IEFbtXrfY0Dk2wwRIW5UQPYV2QBUfdCzUVewkSSJA389AMC0u6eF2JqRo4xhUO561EL5m+UA5FyTo7pw2WDRG/WywOFMS3g14eo84w0NV3ETDiMYwjUkBfKE+/ixX+TdVKpc3HwRllJ+0wT9E56/qhHG2AVjAaj+sBqvJ7RVEbW7a2k82Ig+Ws+0u8Jf3KReIJdMqiXkpxDu+TYKSmKjMqlYTdTtrsPr9hI/Jl5VTQaHQjiUg4drpZRCOOTdSF7JVxghysAHhxA3KiDtgjSiTFE4Wh2+4XOhYv9T+wGYvGIy0Wb1T08eiPTZ8g+umsSN7ZSNxoONaLQaJlw9IdTmjAgl7KdGz031R1+UgF86RrWdtAdCETdqDkvVFsk9usJV3CRkfyEgK9UrIK1lVtydbnRGnS+RX9A/QtyoAK1e6/PeKBOiQ4GzzcnRfx8FYOZ9M0Nmhz9RPDet5a10WdUx+bfsTdlrM+aSMcQkhU/Ds95QPDfNR9Qnbqp2ymHe7CuzQ2zJ8FF7WMrZ7vQJ23ArA1dQPDdqDksp+TbJ05PDNowdbMS7pBLGL5ZnsZzcfjJkNhzbfAxXhwvLZAtjLx8bMjv8SUxSDAkT5Duz+r3q8N4o+Ta514VnlVR3fAnbKgtLeZ1eanbJvaPCsUGiguK5cdqcqhHn3WnY1wCS3C8mLiO8kokVwkLcHBTJxENFiBuVoAyaq/qwCneXOyQ27P+LHJKaed/MsHXj94aaQlPOdqdv5EK4loB3RxE3bZVtONudIbbmDF3HuvA4PMRlxJE0JSnU5gwbQ6yBmFTZu6e2dgZwZmxMuIakIDzEjRi7MHSEuFEJydOTicuIw213c3rX6aDv33CwgZpPa9Dqtcz4SvCm4waDtNly7w01iJvyt8px292Y88wRcRcWkxzju/g2H1NPaMp+wA5A9lXZYS/U1dwJ+vSn8m9V5kWZIbZk+PjEjYq7FPvKwEWl1KAR4kYlaDSakIam9vx+DwB5N+WF5SiA/lDETV1xXYgtgeMvHgdg0rJJYX/RVfDl3agoqbjzgNx3Z9xV4RuSUlCzuKn5VA79hdtoi+4o4sbeYA+Z17w/3F1uWk7I89si4YYoWAhxoyLGL5XFTflb5UHdt6O+g0P/OATAnP+ZE9S9g4HiMm8+0hzS0InL7vKVgE9eNjlkdvgbtSUVuzpddJ2Q81PCOd9GQRE3ahtz0VHbIXdO1hC2jSgBoi3R6GPlSURq9N40H21G8khEW6J9s7AEAyPEjYrIuTYHNHLiazDLEvc9sQ+Pw0PGvIywbXbWH/GZ8ZjGmpC8Uki9Nye3ncTV7sKUbQrri8G5+JKKj6ojqfj0J6fBDfHZ8STmhm+HbQWl9Fdt09eVYb8pM1KIMkWF2Jrho9FoMI1Vb96N0h4k5byUiPH2BgMhblREbEqsb5CmcocfaFx2F3v+JIek5n53bsT+8SjT12s/rw2ZDUpIavKyyRH1PidN+6LXjUo8N1Xvf1ECfkX459uAesNSkZBvo6DmpGLlhizc5naFGiFuVEbeDXkAlL0eHHFz5Nkj2BvsmMaZIipUci6Kp0Sp7gg2HqeH0ldLATnfJpJQPDctJ1rwukPbYRvOiJuxV0ZGOwNF3LRVtuF2qCcnJBLybRTULG6UQggld1AwOIS4URm5N8jlwad2nAp4fojklSj8dSEAc749B60+cr8OmfPlu8uaz2tCsv/JHSdxtDqIy4gj65LICv0lZCegj9XjdXmxlllDaouzzekbBzD2isgQN7FpsRjiDCCBrUIdzfy8bq/vRiGiPDcqy7mRvJJP3CgtLQSDI3KvZmFKynkpmPPMuLvcAQ9NnXjlBM1HmokyRUVMR+K+UJKKbRU2OhuCP8H6+NYvqqS+NAmNNvxDJd3RaDW+XjKhDk1V7qxE8kgY0g1hO0/qXDQajc97o5a8m8ZDjbg6XEQlRPkSysMZZQS
D2jw31jIrzjYnOqPOF/4VDA4hblSGRqNhym1TADj2wrGA7eP1ePnoRx8BkP+dfIwJxoDtpQaMiUZf+ERxpwcLl93FiRdPAPLMrkjEN0AzxEnF5W/LlYaxs2NDaoe/UZKK1VIxpfwNZc7PjAixrtawlJJvkzIzBZ1BF2JrwgshblTIlNtlcVP2ZhnOtsCEpo48d4TmI81EW6KZ9915AdlDbSiVYNUfVwd139JXS3G0OjCNM5G9IHznHPWHb4BmCD03kiT52ijE5keYuFFZUnEkJRODesWNLyQVpnO7QokQNyok9fxULJMteBweSv5T4vf1PS4Pn/zkEwDmrZ6HMTGyvTYKYy6VEx+DLW6UHkIz7pkREXe5vaG4zEM5Y6rlRAutZa3oonTEzhTiJpD4PDcRJm66WrpwdqhnjIhIJh4+QtyoEI1Gw7S7pgFw8OmDfl//4N8O0lrWSmx6LLP/e7bf11crYy+TE0xrP68NWifSjtoOKt6pAGD6PdODsmco8IWlDjcheaWQ2FDxdgUAYy4bgzYmsn7afOKmxBpSO0AWAEo36swLI0PcGBOMvl49avHeSNKZvlzCczN0IusXIII472vngQZOvXvKr3drznYnu/5vFwAX/fAiouLCt/nWUDFPNBObFovH6fFV1ASaI/8+guSRyLwoM6wHOA6EZbIFXZQOV7uL1orQ5IWUvSUn4CudviMJX85NeSteT2jL7ZXZd+aJZmJTIsdDpnhv2qvaQ2yJTPvpduwNdjQ6jRi7MAyEuFEpCeMSmHD1BAAO/PWA39b97Oef0X66ncScRM5feb7f1g0HNBqNL++m6qOqoOx5+B+HASJuGOm56Aw6kqfL3hulo2owcdldvv42yt9NJGHKNqE1aPE4PbRXh/biW/XBF32EFkRGqb2CIm6C2R2+P+qL5ZBU8rRkDDGGEFsTfghxo2LOv18WH/v/sh+X3TXi9ZqPN7P7l7sBuOq3V6E36ke8ZriRddkX4uaDwIubhv0N1O+tR2vQ+pLEI5nUWakANOwLvrip2lmFu8uNKdtE0vTI85BpdVoSJ8ijJFrLQlsxVbmzEohccaOWsFTdni86E4t8m2EhxI2Kybsxj4TxCdgb7CPOvZG8Etvu24bX5WXCNRPIuzHPT1aGF9lXydVKVR9U4XF5ArqX8pnl3ZhHTFJMQPdSA6nnfyFuQuC5Uaqkcq7JiYiRC72RmPeFuAlhObizw0ldoXzRzb4isir/fOLmlDrEjeK5EeJmeAhxo2J0Bh3zvi+Xae/+xe4Rtbbf8+c9VH1YhSHOwJInlkTsBWAg0malEZMcg6vdFdA5U852p0/cRHqDRIVQem584ubanKDvHSzUUDFV82kNXrcXU7YpYpokKvg8YyHKGTsXxXMjkomHhxA3Kue8r51HTGoMtgobB/42vNybhoMNfPD9DwBYsH6B7494NKLRahi3aBwAJwtOBmyfI88ewdHqwDzRTM7VkXvB7Y7iubGWWgM+OqQ7LSdaaDnRglav9X22kYhlogUIbViqe75NpN0gJebIv4tqGHFhb7L7PEhpFwjPzXAQ4kblGGINXPTDiwD46Ecf0WXtGtLrne1OXrvtNdxdbiZcM4ELHrggAFaGF4EWN5IkseeP8qT12d+cHbG9bc4lNjWWuMw4ABoPNAZt3+MvyaMtxi0aF9GdthXPTSjFjZJvE2khKYCECbInynbKFvKKNGVul3miedT0IfM3QtyEARc8eAFJU5OwN9j56IcfDfp1Xo+X1+94neYjzcSPiee6f1w3ai60/TF+sVwqXPNpTUA6QFd9UEXjwUb0sXpmfDWyq6TORbnLVFzqweDES/Joi0lfiqxp6+fSPedGkoLfS8jtcPua90VaMjFA/Jh4tAYtXpeX9tOhrUir+eyLJokR0kcoFAhxEwboDDoW/n4hAHv/vJcTr5wY8DVej5ftq7ZT9kYZ+mg9N790M7GpkdOTYiSYc82YJ5rxur2Uv1Pu9/UVr82Me2YQbY72+/pqJn2unB+gJJ0GGlulTc6d0sDEmycGZc9QYc4zo9FqcLY58bQENhm+N2p31+JxeIhNj8Uy2RL0/QONVqclYdwX3psQh6aEuBk5QtyECROWTGDud+cC8PZX36Zmd9/DH91dbt76r7c48NcDaLQarvvXdeKP5ByUC2Hpf0r9um5bVRsnXpbF5wXfvMCva4cDGXMzAKgtDFyydndKXpHHk4y9bCxx6XFB2TNU6I16X2jKWRn8EQFVOyM330ZBCU21locu9CdJkq/YQfxuDx8hbsKIy9ddztgFY3G0OtiyaAvHXzrewz1dW1TLcxc/x5Fnj6DRabj+ueuZvCwyJ1GPBEXclL1RNqIqtHMp+l0Rkkdi7BVjSZ2Z6rd1wwVF3DQdasLVOfLeTAOhhKQm3hrZXhsFZUCpszoE4iZCm/d1Rw0VU9ZSK/YmO7oona8CUTB0Rl8XtzBGZ9DxpTe+xMs3vkzl+5W8uuxVxlw8hvFLxoMGqj+s5tS7pwCISYnh+ueuZ8KSCaE1WqWMuXgMMckx2JvsVH1UxbgrR15lY2+ys++JfQDMXz1/xOuFI/Fj4onLjKOjpoP6vfVkXZIVsL06Gzp9F9xJt0Z2vo1C0rQkSl8rxVkVXHHjcXmo/kQeOBupk+3hTMVUKD03SkgqbXbaqGy06i+E5ybMiIqPYtlby7jwBxeiNWg5ves0u/5vF7se3SULGw1M//J0vrL3K0LY9INWryX3xlwAjm897pc1i/9QjKvDRdoFaRHdb2UgfKGp3YENTZW+WorklUjPTx817Q2UAaXBDkvVfFaDq91FTEoMKedF7pwjX8VUCHNuRL6NfxCyMAzRR+u5/LHLmfWNWZS+Wkr9XrmTZcqMFHJvyPX1wxD0z9Q7pnLomUMce/4YV/36KnRRumGvZW+2U/zbYgDmr50fsTkJgyF9bjqlr5UGXNwoJeCRXiXVnVCFpSq2VQAwfsn4iK649IWlVOC5EeJmZAhxE8YkZCcw+5uzQ21G2DJ+0XhfCKXszTIm3TL8i+Tn6z/H0eog9fxUpiyP/DlS/ZE5X/5RVsqGA0FnYycnt8l9ikaTuFE8N55mD45WB4aU4AxUVN7rCUsnBGW/UKGEpdoq2/C4POgMw7/hGQ5uh5uGvXKHbyFuRoYISwlGLVq9lml3TwPg0N8PDXudtuo29vxBLv++7LHLIvrOdjCMuXgMaOTEyPaawPQLOfr8UbxuL+lz0n0X/NGAMdHoa5TYcrwlKHvam+0+L9z4JeODsmeoiMuIQx+jR/JK2E4GPzTVsLcBj9NDTEoMibmjI9QaKIS4EYxqZnxFbrJX+loptsrh/Zjt/P5O3HY3WZdmkXt9rj/NC0uMiUbfKIbqj6sDssfhfxwGznx+ownLFDns3HykOSj7nXr3FJJXInlGMqYsU1D2DBUarebMDK8Sa9D3V0JSGfMzRnVo2x8IcSMY1aTOTCX7qmwkj0Tx74qH/PrKDyo5+u+joIGFv18ofpC+IOsyuUqq+iP/i5umI03U7q5Fq9cy5Y7RFwJUEnobDwZnxMVoCUkpKOKmpS
Q4nrHuiHwb/yHEjWDUM+978uT1/Zv247A6Bv06V6eLbfdvA2DWylliem83xl4m90IJhLg5/E/Za5NzbQ5xaZHduK83kmfKYbhgzO+SJMmXTDxqxM1EMyCPuQg2Qtz4DyFuBKOenGtySJ6ejLPNSeEvCgf9uo9++BEtx1uIHxPP5esuD6CF4YfiuanfU+/X+V2SV/KJm+lfme63dcOJlJlfeG6CIG5aTrRgO2lDF6WL6OZ93VHETbA9Nx11HVhLraA5k5QvGD5C3AhGPRqtxidOin9bjPP0wBfj4y8dp+i3RQBc/deribaMrhlSA2EaayIxJxHJK1H5QaXf1j313inaqtowmo3k3ZDnt3XDieTpyaABe72djrqOgO5V8U4FAFmXZ2GIDU5lVqhRWmkEO+em6kO5IWXqzFTxe+IHhLgRCIC8G/OYcM0EvC4v9X+qx+PqezBhXXEdb33lLQDmfGcOOdeM3oZ9/aFU1igXSH+gVLVNvX0q+ujR2cnCEGvAkCkLjYb9DQHda7SFpKBbWKqsFa/Hf6NZBmI0jLcIJkLcCASARqNh0e8XYYg3YD9kZ+fDO3vM7QKo2V3D5kWbcXW4GLdwHFf84ooQWBseTLh6AnAmIXWk2JvsHNt8DIAZXx19VVLdMY43AoEVN65OF6d2yONcRpO4MWWb0Bq0eJwe2qragravEDf+RYgbgeALLJMsXPOPa0ADB548wGu3vebr0+JodfDZ45/x/GXP47A6GHPxGG5+6Wa0evEn1BfjFo5Do9PQfKyZ1pMjT8488LcDeBwe0manjfqEy6gJUUBg825OFpzEbXdjGmcaVQMctTqtr5lfsEJTXS1dPqE69nIhbvzB6PTrCgR9kHtDLmkPpNH4VCPHtx7nxEsniB8bT2ddJx6HHKrKuymP6/91PVGmqBBbq26izdFkXpjJ6U9OU/FOBbNWzhr2WpJX8g0lnf3N2aO+5F7x3NTvqQ/YHiWvlAAw6ZZJo+79Nk8003K8BWuJlfGLAt+4sPrjapDAMtlCXMboqwAMBGF327lhwwY2bNjAqlWrWLVqVajNEUQgiUsTWf7ucsZcMgbJK9F2qg2Pw0PStCSuefoabnnlFiFsBomSj1Tyn5IRrVPyagmt5a0YzUam3jnVH6aFNcaJsrhpPNSIq9Pl9/W9bi+lr5YCMPGWiX5fX+0kTZFneDUfDU6jRBGS8j9h5blZs2YN69ev9/171apVLFmyhO3bt4fQKkEkknlhJnd9fBe2UzY66jowJhqxTLKMujvYkTJp2SQ+/vHHnNx+kq6WrmFVgUiSxOfrPwfgggcuGDVVO/2hT9YTmxFLZ20n9Xvryboky6/rV39Sjb3JTrQlelSGSZQBpU1Hm4KynxA3/idsPDdWq5Xi4mKsVqvvsVWrVlFQUEBZWVnoDBNENAnjEsicl0nS5CQhbIZByvQUUs5LwevyDtt7U/1xNTWf1qAz6sh/KN/PFoYnGo2G9Dly08hATF9XQlK5N+aOyrwyZV5ZMEZcONud1BXVASLfxp+E1be2sLDwLCGTmyvP8ekueAQCgbqYcps8IkGpdBoqu/5vFyDPkRL5CGdInxsYcSNJ0ln5NqMRxXNjO2kLSNivOzWf1uB1ezFlm0gYnxDQvUYTYROWMpvNtLSc3TGyoKAAOCNyesPhcOBwnGmpb7PJwxFdLhcul3+/tMp6/l5XLUT6+UHkn2Mozi/v1jw+/vHHVGyroKWihfis+EG/tmpnFf+/vbuPbqrM8wD+vUlfUiztbUsBSws25XUqKCmvDqwg6aLrqKN0YBb3wNEZ252X9ezszDbWWUc9Mx62PbtnZ5yZ3aWO7J5dUEvjcR0HdLZxBlFcFAgoAhXsFVosHSrpbQulmCZ3/4gJFPqSNje93Cffzzk9B9Lk9vnlJr/88tzn5VTDKViSLXD8yBFVuxPlHE64NbRS8Zn3z+gaa/sH7ej8tBNWmxVTVk4x5Hk0+hwmy8mw5djQe64XZz86i4nzJ+p6/CvjO/XH0FIJecvy0NfXp+vfMVI8zuFIjiVpAy3mYRIlJSWoqKhAeXn5oPd56qmn8PTTT19z+wsvvIBx48bFs3lE9KWWx1vQe7QX2WuzkbM+J6rHaJqG01Wn0dvYi8y7MjGxQt8PGLMLdAWgbAj1ZNu32mFNt+py3HMvnYPvJR9uWHQD8h7P0+WYZtRS1YLeY72Y9INJyLg9fj0qp398GhePXMTE70xE5urMuP0dEfT09GD9+vXo7OxERsbQ58Sw4sbtdqOurm7Y+1VVVcHhuPY6u8vlQk5ODiorK4d8/EA9NwUFBfj888+HfXJGyu/3o6GhAaWlpUhOFm/Qo+jxAeLHaFR8J9wnsHP9TqRNTMPDTQ8jKXX4TuPGbY34/UO/R1JaEjYe24j0vOh6fBLpHG6buw2dSie+/ruvY9qfxz5lWdM0bL11K3zHfHA+50TxRmMWS7wezqHnrz04suUIFj2+CEufWqrrscPxrVi6As/nP4+gP4gNRzYga0aWrn/HSPE4h11dXZgwYUJUxY1hl6XKyspQVlY2qse63W4UFRUN2WMTlpqaitTU1GtuT05OjtubJp7Hvh6IHh8gfoxjHd+sslnY/fe7cf6z8zjx0gnM+/a8Ie/fq/bi7cfeBgAsfWIpsqaNPOknwjnMX56PTqUTZ/acwfS7Y5+y3f5hO3zHfLCmWDG7bLbhz5+R5zC3OLRwoXpcjVsb2t5tQ9AfRGZhJnLn5Ao5aUHPcziS45hqQDFweZxNuLBRVZWzpYiuc9ZkKxb83QIAwJ4n9gy5U7imafB814OeP/Uge1Y2FvxwwVg103Tybw/Nrml5S5/NSY+9cAwAYL/bDpuc2Js3RqaDH43fdPDmhi+3t1h9k5CFjZFMVdx4vV54vV44HA4oigJFUVBbW4vs7Gyjm0ZEw7j1e7dCLpJxoe0C9j6zd9D7Hf7NYTS+2AjJKmH1ltWwpugzlkREBbcXAAjNmIp1Vo8W1HDsxVBxM+fBOTG3zewmzA0N2PZ97EPfpfgM9A3vu5ZIe3eNFdMUN6qqYtWqVXC5XCgqKor8uFwuyLJsdPOIaBhJqUlY8c8rAADvV7+PE6+cuOY+x18+jobvhBblXPazZbovTieazMJMpE9JR9AfxJm9Z2I6VsuuFnQ3dyMlIwWFf8Gd7sfnj4ctywYtoMWl98b/Jz/UT1RIVglT75iq+/ETnWmKm/BUcE3TrvkhInOYft90zP/+fADAjgd34NC/HULAH8Clzkt45x/ewWvrXoMW0FC8sRiLKhcZ3NrrnyRJkd6b5l3NMR3r8POHAQBz1s9Bcpq4Y5WiJUlSZMPQ9g/0332951APACBvaR5SM68dF0qxMU1xQ0RiWPkvK2H/mh19F/vg+a4Hz6Y/i19l/wp7n9kLLaDh5oduxurnV0OycAxCNMLf+k++cXLUx+jt6MXxl48DAOZ+a64ezRJCPIubCwcvAAiNtyH9sbghojFlSbLg/lfvxx2/uAO2LBsCXwSgBTVkz87GPfX34M4td8JiZWqKVvgSUtu+Npw/c35Uxzi69SgClwLIv
SU3sq0DXS5uzn6g7+7rAX8AFz+8CIDFTbyYZoViIhKHZJHgeNSB+d+fj85TnbCmWDF+ynijm2VK6TemY/LCyWjb1wZlhzLsNPuraUENB395EAAw75F5nLVzhdx5l3tuNE3T7blpe78NwZ4gbDk2THKwmIwHfj0iIsNIFglyoczCJkZF9xQBAJpeaxrxY5XXFXSc6EBqZqphi/ZdryYUT4BkldDr68X51tH1ig2k+X9D46OmrprKXso44bNKRGRy4eLmVMMpfHF+8DWEBuL9uRcAMPfbc5GSnqJ728wsyZYUWe8mvHO3Hj7d+SkA6LKqNA2MxQ0Rkcnl3pKLrBlZ6LvYN6Ld11v3tuKU5xQkqxSZxUb93bj4RgDAmfdim2of1nmyMzRA2QIU3s0p9/HC4oaIyOQkScLND98M4PKU7mi8+/S7AIDiDcXIvImbNg4kUtzEuI5Q2CevfgIASJuThrScNF2OSddicUNEJIDijcWQrBJa323FucbhF537bM9nOPnGSUhWCUt+vGQMWmhOeUtCO6O37WtDMBCM+Xif/E+ouLlh8Q0xH4sGx+KGiEgA6Temo/Cu0GWO/f+0f8j7BvuC8HwvtE/f3IfnQi6S490808opzkHyDcn4ovsLnDsW20rFPe09OP32aQBA+uLodrmn0WFxQ0QkiMVViwEAH/3nR/Ad9w16v4O/Poj2D9phy7Jh2TPLxqp5pmSxWjB54WQAsV+aOv7ycWgBDRMdE5E8iatAxxOLGyIiQUy5bQrsX7NDC2h4+7G3B9yepu1AG3a7dgMAlm9ajnG548a6maZz45LQuJvW/2uN6TiNLzUCAGaunRlzm2hoLG6IiASy/JnlkKwSTrxyAt5fePv9TlVUvPrAqwhcCqDoniLMe2RkC/4lqvzl+QCA5j80j3o/w+7PunF6d+iS1IyyGbq1jQbG4oaISCC583Iju6/v+uEuvPPEO/B97MOR/z6CbUu2obu5G/J0GXf9113cvytK+X+WD0uyBV0nu9CpdI7qGB/XfQxoQN5teciYmqFzC+lqLG6IiATjeNSBWypugRbUsPdne7Fl9ha8vuF1XGy/iEmOSfjmW9+ETbYZ3UzTSElPQd7S0Kypkw0nR/x4TdNweEtoiv5X/uorejaNBsHihohIMJIkofTfS3Gv+17kzstFyvgUZNyUga/+9KtY99Y6pOdxps5ITXOGVhM+5Tk14se27WvDuSPnkGRLwuy/nK1302gA3DiTiEhQM9fMxMw1HLyqh2nOadjzkz1o+UMLgoHgiPaE+mjLRwCAGWtmwCbb4Pf749VM+hJ7boiIiIYxeeFk2LJs6O3oxem3Tkf9uF61F0e3HQUAzP3W3Hg1j67C4oaIiGgYliQLZqwJzXJqrGuM+nGHf3MY/vN+5BTnoGBFQbyaR1dhcUNERBSF2etC42WOu48j4A8Me/9gXxDeZ0PT8Rf8YAEkibPTxgqLGyIioigUrCjAuInj0OvrjWpg8dFtR9Hd0o203DTMeXDOGLSQwljcEBERRcGSZMGsdbMAAId+fWjI+/Zd6sOen+wBACz80UIk2Th/ZyyxuCEiIoqS41EHJIsEZYeC9g/bB73foX89hO7mbqTnpWP+38wfwxYSwOKGiIgoalnTszCzLDS9/r1/fG/A+6hNKvY8Eeq1ue2p25Ccxk0yxxqLGyIiohFY9NgiAEDji41o+l1Tv9/1XerDzg074b/gR8GKAk7/NgiLGyIiohGYNH8SSv62BADwxkNvoP1w6PKUv8eP3675LVrfbUXK+BTc+R93cv8ug3CEExER0Qgt37QczX9sRvsH7di6cCumOaehbV8bes72ICktCfe9ch8yb8o0upkJiz03REREI5RkS8I3Gr4B+912BC4FoOxQ0HO2BxnTMvDAzgcwbdU0o5uY0NhzQ0RENArjcsfh/tfuR8uuFqifqLBl21B0bxGsyVajm5bwWNwQERGNkiRJmLpyKqaunGp0U+gKvCxFREREQmFxQ0REREJhcUNERERCYXFDREREQmFxQ0REREJhcUNERERCYXFDREREQmFxQ0REREJhcUNERERCYXFDREREQmFxQ0REREJhcUNERERCYXFDREREQkm4XcE1TQMAdHV16X5sv9+Pnp4edHV1ITk5WffjG030+ADxYxQ9PkD8GEWPDxA/RtHjA+ITY/hzO/w5PpSEK266u7sBAAUFBQa3hIiIiEaqu7sbmZmZQ95H0qIpgQQSDAbR2tqK8ePHQ5IkXY/d1dWFgoICtLS0ICMjQ9djXw9Ejw8QP0bR4wPEj1H0+ADxYxQ9PiA+MWqahu7ubuTl5cFiGXpUTcL13FgsFuTn58f1b2RkZAj7ggXEjw8QP0bR4wPEj1H0+ADxYxQ9PkD/GIfrsQnjgGIiIiISCosbIiIiEgqLGx2lpqbiySefRGpqqtFNiQvR4wPEj1H0+ADxYxQ9PkD8GEWPDzA+xoQbUExERERiY88NERERCYXFDREREQmFxQ0REREJhcUNERERCSXhFvGLl5qaGsiyDABQVRWVlZXGNkhnNTU1AICmpiYAwObNm41sTtyVlpaioaHB6GbozuVyoaioCACQnZ2NsrIyg1ukr9raWqiqClmW0dTUhKqqqsj70mxUVcX27dtRX18/4GtRhJwTTYyAefPOcPFdyaw5J5oYjcg7LG50EH4DlpeXAwA8Hg8qKipM90YcjMvlQnV1deT/FRUVpn0jRsPtdsPj8RjdDF2pqopVq1bhzTffhCzL8Hq9KCkpiWoDOrOoqalBeXl5vw/8Rx55BPX19cY2bBS8Xi/2798PVVXh8/mu+b0IOWe4GM2ed4aL70pmzTnDxWho3tEoZrIsax0dHf1uE+Wp7ejo0JxOZ7/4Dhw4oAHQmpqajGtYnHR0dGibN28W5vyFlZeXa9XV1f1ua2hoMKg18eF0OqO6zUzq6+s1h8Nxze0i5ZyBYhQp7wx2DsNEyDmDxWhk3uGYmxgpihLpBr+aGSvxgezfvx+KokT+b7fbAYSqctFs374da9euNboZuqutrUVZWRkURYm8Lp1Op8Gt0pcsyygtLY28LhVFibxWRZIIOQdInLwjas4BjM07LG5idOWb70qyLAvxJpRlGR0dHXA4HJHbwi9S0T44PB6PcB/4wOXXqNfrhaqqsNvtqKioEOqDEACee+45KIqCrKwsuFwueDweU12miZboOQdInLwjas4BjM87HHMTJ9nZ2cNeZzWrTZs2YfPmzaYdqDmY8BtQlA+IsHCSkWU58mFRXV2NwsJCdHR0GNk0XcmyDJfLhYaGBtTU1MDpdGLt2rXCvU4HI3LOAcTMO6LmHMD4vMOemzgRNcm4XC6sW7cuMpBRFOHuU5EtWLAg8u/wt3yRem9cLhfsdjvq6+vR1NQEn8+HkpISo5s1ZkTNOYCYeScRcg5gXN5hcROjwbpIwxW5SNxuN4qKikw55XQoXq+33xtQNIO9DmVZHvQSh9mEx6GEu/jtdjsOHDgAWZbhdrsNbp2+EinnAGLmHdFzDmB83uFlqRjZ7fbIybr6ZIp0LTVcaYe/OYWn
<rest of base64-encoded PNG figure data omitted>\n",
"image/png": "<base64-encoded PNG omitted: Matplotlib 3.8.4 figure output>",
"text/plain": [
"<Figure size ... with ... Axes>"
" ] @@ -900,39 +3557,54 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 15, "id": "d42f3288", "metadata": {}, "outputs": [], "source": [ - "def epde_discovery(t, x, use_ann = False):\n", + "def epde_discovery(t, x, diff_mode = 'FD', use_pretrained_nn: bool = True):\n", " dimensionality = x.ndim - 1\n", " \n", "\n", - " epde_search_obj = epde.EpdeSearch(use_solver = False, dimensionality = dimensionality, boundary = 30,\n", - " coordinate_tensors = [t,])\n", - " if use_ann:\n", - " epde_search_obj.set_preprocessor(default_preprocessor_type='ANN', # use_smoothing = True poly\n", - " preprocessor_kwargs={'epochs_max' : 50000})# \n", - " else:\n", - " epde_search_obj.set_preprocessor(default_preprocessor_type='poly', # use_smoothing = True poly\n", + " epde_search_obj = epde.EpdeSearch(use_solver = True, dimensionality = dimensionality, boundary = 30,\n", + " coordinate_tensors = [t,], verbose_params = {'show_iter_idx' : True})\n", + " if diff_mode == 'ANN':\n", + " epde_search_obj.set_preprocessor(default_preprocessor_type='ANN',\n", + " preprocessor_kwargs={'epochs_max' : 50000})\n", + " elif diff_mode == 'poly':\n", + " epde_search_obj.set_preprocessor(default_preprocessor_type='poly',\n", " preprocessor_kwargs={'use_smoothing' : False, 'sigma' : 1, \n", " 'polynomial_window' : 3, 'poly_order' : 3}) \n", - " # 'epochs_max' : 10000})\n", + " elif diff_mode == 'FD':\n", + " epde_search_obj.set_preprocessor(default_preprocessor_type='FD')\n", + " else:\n", + " raise NotImplementedError('Incorrect differentiation mode selected.')\n", + " \n", " eps = 5e-7\n", " popsize = 8\n", - " epde_search_obj.set_moeadd_params(population_size = popsize, training_epochs=55)\n", + " epde_search_obj.set_moeadd_params(population_size = popsize, training_epochs=10)\n", " trig_tokens = epde.TrigonometricTokens(freq = (2 - eps, 2 + eps), \n", - " dimensionality = dimensionality)\n", - " factors_max_number = {'factors_num' : [1, 2], 'probas' : [0.65, 0.35]}\n", + " dimensionality = dimensionality)\n", + " factors_max_number = {'factors_num' : [1, 2], 'probas' : [0.7, 0.3]}\n", + "\n", + " custom_grid_tokens = epde.GridTokens(dimensionality = dimensionality, max_power=1)\n", + "\n", + " if use_pretrained_nn:\n", + " import pickle\n", + " try:\n", + " with open(r\"/home/maslyaev/Documents/EPDE/examples/saved_objs/well_enough_ann.pickle\", \n", + " 'rb') as input_file:\n", + " pretrained_nn = pickle.load(file = input_file)\n", + " except:\n", + " pretrained_nn = None\n", + " else:\n", + " pretrained_nn = None\n", "\n", - " custom_grid_tokens = epde.GridTokens(dimensionality = dimensionality)\n", - " \n", " epde_search_obj.fit(data=[x,], variable_names=['u',], max_deriv_order=(2,),\n", - " equation_terms_max_number=6, data_fun_pow = 1, \n", + " equation_terms_max_number=7, data_fun_pow = 1, \n", " additional_tokens=[trig_tokens, custom_grid_tokens], \n", " equation_factors_max_number=factors_max_number,\n", - " eq_sparsity_interval=(1e-6, 1e-2))\n", + " eq_sparsity_interval=(1e-5, 1e-1), data_nn = pretrained_nn)\n", "\n", " epde_search_obj.equations(only_print = True, num = 1)\n", " \n", @@ -948,7 +3620,19 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 16, + "id": "7c8a27cd", + "metadata": {}, + "outputs": [], + "source": [ + "import pickle\n", + "with open(r\"/home/maslyaev/Documents/EPDE/examples/saved_objs/well_enough_ann.pickle\", 'rb') as input_file: \n", + " pretrained_nn = pickle.load(file = input_file)" + ] + }, + { + "cell_type": "code", + "execution_count": 
17, "id": "7c734869", "metadata": {}, "outputs": [ @@ -956,634 +3640,1411 @@ "name": "stdout", "output_type": "stream", "text": [ - "setting builder with \n", - "setting builder with \n", + "setting builder with \n", + "setting builder with \n", "trig_token_params: VALUES = (0, 0)\n", - "OrderedDict([('power', (1, 1)), ('dim', (0, 0))])\n", - "Deriv orders after definition [[None], [0], [0, 0]]\n", + "Deriv orders after definition [[0], [0, 0]]\n", + "160\n", + "initial_shape (160,) derivs_tensor.shape (160, 2)\n", + "Size of linked labels is 3\n", + "self.tokens is ['du/dx0', 'd^2u/dx0^2']\n", + "Here, derivs order is {'du/dx0': [0], 'd^2u/dx0^2': [0, 0]}\n", + "self.tokens is ['u']\n", + "Here, derivs order is {'u': [None]}\n", "initial_shape (160,) derivs_tensor.shape (160, 2)\n", - "self.tokens is ['u', 'du/dx0', 'd^2u/dx0^2']\n", - "Here, derivs order is {'u': [None], 'du/dx0': [0], 'd^2u/dx0^2': [0, 0]}\n", - "The cardinality of defined token pool is [3 2 1]\n", - "Among them, the pool contains [3 1]\n", - "Creating new equation, sparsity value [4.04371548e-06]\n", + "Using pre-trained ANN\n", + "The cardinality of defined token pool is [1 2 2 1]\n", + "Among them, the pool contains [1 2 1]\n", + "self.vars_demand_equation {'u'}\n", + "Creating new equation, sparsity value [0.00241682]\n", "New solution accepted, confirmed 1/8 solutions.\n", - "Creating new equation, sparsity value [0.00581916]\n", + "Creating new equation, sparsity value [0.00124314]\n", "New solution accepted, confirmed 2/8 solutions.\n", - "Creating new equation, sparsity value [6.99509856e-05]\n", + "Creating new equation, sparsity value [0.00052582]\n", "New solution accepted, confirmed 3/8 solutions.\n", - "Creating new equation, sparsity value [5.32778731e-06]\n", + "Creating new equation, sparsity value [0.00711448]\n", "New solution accepted, confirmed 4/8 solutions.\n", - "Creating new equation, sparsity value [1.10277863e-05]\n", + "Creating new equation, sparsity value [0.01888821]\n", "New solution accepted, confirmed 5/8 solutions.\n", - "Creating new equation, sparsity value [0.00670686]\n", + "Creating new equation, sparsity value [8.97378747e-05]\n", "New solution accepted, confirmed 6/8 solutions.\n", - "Creating new equation, sparsity value [1.87153502e-06]\n", + "Creating new equation, sparsity value [1.39739433e-05]\n", "New solution accepted, confirmed 7/8 solutions.\n", - "Creating new equation, sparsity value [0.00113239]\n", + "Creating new equation, sparsity value [0.03085147]\n", "New solution accepted, confirmed 8/8 solutions.\n", - "[0.54, 0.45999999999999996] [[0.16, 0.84], [0.0, 1.0], [0.64, 0.36], [0.46, 0.54], [0.98, 0.020000000000000018], [0.54, 0.45999999999999996]]\n", + "[0.9, 0.09999999999999998] [[0.06, 0.94], [0.9, 0.09999999999999998], [0.72, 0.28], [0.84, 0.16000000000000003], [0.74, 0.26]]\n", "best_obj 2\n", "Multiobjective optimization : 0-th epoch.\n", "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", + "solving equation:\n", + "-0.3045354291192121 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + -1.1356217841768323 * t{power: 1.0, dim: 0.0} + 0.5867988641368029 * d^2u/dx0^2{power: 1.0} + 3.6636349393798056 * u{power: 1.0} + 1.0382187984537603 * du/dx0{power: 1.0} + -0.6721872058339817 
* t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000002187112553, dim: 0.0} + -1.2761631041671366 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.002416817402355008}}\n", + "Step = 1000 loss = 0.009176.\n", + "Step = 2000 loss = 0.000661.\n", + "fitness error is 2.2227062007781884, while loss addition is 0.0010076235048472881\n", + "solving equation:\n", + "-0.04588643450619638 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -1.5303374342862501 * t{power: 1.0, dim: 0.0} + 0.04955026522312803 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + -0.7150159672673443 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + 0.20929685465960385 * u{power: 1.0} * t{power: 1.0, dim: 0.0} + 1.580036832806393 * u{power: 1.0} + 2.2449952616843585 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.001243136765844326}}\n", + "Step = 1000 loss = 0.001075.\n", + "Step = 2000 loss = 0.001305.\n", + "fitness error is 2.1241529762219473, while loss addition is 0.004127428401261568\n", + "solving equation:\n", + "0.8966981693155306 * u{power: 1.0} + 0.38980815413105985 * d^2u/dx0^2{power: 1.0} + -0.4357505073603655 * t{power: 1.0, dim: 0.0} + 1.7995906796527037 * u{power: 1.0} * cos{power: 1.0, freq: 2.0000002061147546, dim: 0.0} + 0.05947958019404172 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.046528051540193516 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + -0.03887996838157147 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0005258217895730893}}\n", + "fitness error is 1.2237323569125735, while loss addition is 7.976697816047817e-06\n", + "solving equation:\n", + "0.14442289048298915 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 2.0000003517169938, dim: 0.0} + 1.1526715690848583 * t{power: 1.0, dim: 0.0} + 0.12013443705024661 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -2.9314718093530843 * u{power: 1.0} + 0.0791765667943347 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.7018592388665876 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.00711447789434163}}\n", + "Step = 1000 loss = 0.001320.\n", + "fitness error is 1.9498030450845052, while loss addition is 0.03700912743806839\n", + "solving equation:\n", + "-0.34297286414169265 * du/dx0{power: 1.0} + 1.9989108297050273 * d^2u/dx0^2{power: 1.0} + -0.14318841419209768 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 1.5932592581749612 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999997078252758, dim: 0.0} + -1.8177804028957731 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000002585559478, dim: 0.0} + 0.0 * u{power: 1.0} + -5.399892625895887 = d^2u/dx0^2{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, 
('sparsity', 'u'): {'optimizable': True, 'value': 0.018888213628177674}}\n", + "Step = 1000 loss = 0.002795.\n", + "Step = 2000 loss = 0.004575.\n", + "fitness error is 2.2217626468679854, while loss addition is 0.0004497589834500104\n", + "solving equation:\n", + "0.4108320999549999 * u{power: 1.0} * cos{power: 1.0, freq: 1.9999997048669242, dim: 0.0} + -0.2170384846973597 * d^2u/dx0^2{power: 1.0} + -0.1260364363975524 * t{power: 1.0, dim: 0.0} + -0.11939082048939033 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + -0.6708216536467625 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999996970353613, dim: 0.0} + 0.2345220482822457 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + 1.3791128473886902 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 8.973787469187584e-05}}\n", + "Step = 1000 loss = 0.008967.\n", + "Step = 2000 loss = 0.009778.\n", + "fitness error is 6.473098856521441, while loss addition is 0.002877991646528244\n", + "solving equation:\n", + "0.49866996837502764 * d^2u/dx0^2{power: 1.0} + -0.30223788183205536 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + -0.1921247043006492 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -1.9881947787690353 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004281605723, dim: 0.0} + -0.12751929673176046 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.3694536160979601 * t{power: 1.0, dim: 0.0} + -2.4568568453004223 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 1.3973943251071031e-05}}\n", + "Step = 1000 loss = 0.000326.\n", + "Step = 2000 loss = 0.006853.\n", + "fitness error is 2.513096284784833, while loss addition is 0.001661611022427678\n", + "solving equation:\n", + "-0.26373504652293683 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 3.812425582815499 * d^2u/dx0^2{power: 1.0} + 1.4155448160422524 * t{power: 1.0, dim: 0.0} + 0.30282293779411495 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + -5.413548330058982 = t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.030851466308869778}}\n", + "Step = 1000 loss = 0.011147.\n", + "Step = 2000 loss = 0.000226.\n", + "fitness error is 5.799229945109833, while loss addition is 0.0004654070071410388\n", + "solving equation:\n", + "-2.3819716730020883 * t{power: 1.0, dim: 0.0} + -0.1423731716279737 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + 5.502707831820092 * u{power: 1.0} + 0.9626169656285564 * d^2u/dx0^2{power: 1.0} + -1.1692448008035028 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + -0.49781012153175463 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.000000061561971, dim: 0.0} + 0.5084421933254488 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0006627419042054699}}\n", + "Step = 1000 loss = 0.000229.\n", + "fitness error is 1.1329446188307535, while 
loss addition is 0.0006727319560013711\n", + "solving equation:\n", + "0.18804191911338416 * t{power: 1.0, dim: 0.0} + 1.7191521760784174 * d^2u/dx0^2{power: 1.0} + 1.1289247194237633 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.000000332481665, dim: 0.0} + 0.0 * u{power: 1.0} + 0.24643088002139665 * u{power: 1.0} * du/dx0{power: 1.0} + -0.7960364095574757 * du/dx0{power: 1.0} + -1.6282544981255942 = u{power: 1.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0021021040131383394}}\n", + "Step = 1000 loss = 0.009171.\n", + "Step = 2000 loss = 0.003209.\n", + "Step = 3000 loss = 0.000982.\n", + "fitness error is 5.693913952555483, while loss addition is 0.006725414656102657\n", + "solving equation:\n", + "0.021669301702454652 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + -0.0334109610332024 * d^2u/dx0^2{power: 1.0} + 0.34359770433670944 * du/dx0{power: 1.0} + 0.43886643581115126 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.9999997315090372, dim: 0.0} + 1.2531328499735543 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000112995997, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.030010265422957448}}\n", + "Step = 1000 loss = 0.000021.\n", + "fitness error is 1.2094037592692497, while loss addition is 8.626422641100362e-06\n", + "solving equation:\n", + "0.16205552234989853 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000004271695198, dim: 0.0} + 0.4119252280348857 * du/dx0{power: 1.0} * u{power: 1.0} + -0.23143849108194095 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000346269171, dim: 0.0} + 0.0 * d^2u/dx0^2{power: 1.0} + -0.43144986425213355 * u{power: 1.0} + 0.16235440184037586 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.9999997203078859, dim: 0.0} + 1.2562389041404742 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004586109997259287}}\n", + "Step = 1000 loss = 0.001479.\n", + "Step = 2000 loss = 0.000222.\n", + "fitness error is 5.079721854063879, while loss addition is 0.000449841347290203\n", + "During MO : processing 1-th weight.\n", + "solving equation:\n", + "-3.872530533372672 * u{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 2.0000004699365936, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999997078252758, dim: 0.0} + 1.5025352545279127 * t{power: 1.0, dim: 0.0} + -0.11877142172342492 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999996385700907, dim: 0.0} + 0.5996511543650989 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.017989922285153788}}\n", + "fitness error is 1.9689530309046106, while loss addition is 7.4845684139290825e-06\n", + "solving equation:\n", + "4.045256390551648 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999997515245362, dim: 0.0} + 
2.5619385016158884 * du/dx0{power: 1.0} + 0.0986777554501528 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + 1.9564857697515177 * u{power: 1.0} * du/dx0{power: 1.0} + 0.6953976901205252 * t{power: 1.0, dim: 0.0} + 0.9912792408519939 * d^2u/dx0^2{power: 1.0} + -4.898772860842348 = t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.002434250398991524}}\n", + "Step = 1000 loss = 0.012229.\n", + "fitness error is 2.4616332219808363, while loss addition is 0.02240552380681038\n", + "solving equation:\n", + "1.1820436004993171 * du/dx0{power: 1.0} + -5.116173981860433 * u{power: 1.0} * cos{power: 1.0, freq: 2.0000003517169938, dim: 0.0} + 1.7048651641641146 * d^2u/dx0^2{power: 1.0} + -2.6426110624234456 * t{power: 1.0, dim: 0.0} + 6.78717516574896 * u{power: 1.0} + -0.8458840093297435 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999995387607294, dim: 0.0} + -2.0571925262602977 = d^2u/dx0^2{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0004982480984776229}}\n", + "Step = 1000 loss = 0.008869.\n", + "fitness error is 2.0289777960341477, while loss addition is 0.006521215662360191\n", + "solving equation:\n", + "0.129034342732876 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.15047193220024058 * du/dx0{power: 1.0} + 1.511418168505495 * t{power: 1.0, dim: 0.0} + -3.730822381014767 * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.2775521262807794 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.006387733048797462}}\n", + "fitness error is 1.767887954703919, while loss addition is 7.979821930348407e-06\n", + "During MO : processing 2-th weight.\n", + "solving equation:\n", + "0.04251132157576463 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.952698032671498 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000112995997, dim: 0.0} + 1.1693061603950738 * t{power: 1.0, dim: 0.0} + -3.4228764403485528 * u{power: 1.0} + -0.8349424311421922 * d^2u/dx0^2{power: 1.0} + 0.4138258621806142 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.18191323600312745 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004632708778700998}}\n", + "Step = 1000 loss = 0.000058.\n", + "fitness error is 1.97583185009682, while loss addition is 7.644413926755078e-06\n", + "solving equation:\n", + "0.0 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000004271695198, dim: 0.0} + 0.19078501327782238 * du/dx0{power: 1.0} * u{power: 1.0} + -0.3560405257314793 * d^2u/dx0^2{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.14898280227812902 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + 1.097916468738502 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000346269171, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 
'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.030981872476527374}}\n", + "Step = 1000 loss = 0.000241.\n", + "Step = 2000 loss = 0.000093.\n", + "fitness error is 2.801474033856177, while loss addition is 0.00025567496777512133\n", + "solving equation:\n", + "-3.997792375449434 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + -0.00633356987573519 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999998578789744, dim: 0.0} + -0.012365537354649143 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -1.0552932758048856 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004281605723, dim: 0.0} + 1.4949145786059626 * t{power: 1.0, dim: 0.0} + -0.026335762063727643 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0006799398782020238}}\n", + "Step = 1000 loss = 0.000166.\n", + "fitness error is 1.6904879237783477, while loss addition is 9.638874871598091e-06\n", + "solving equation:\n", + "-0.08797705671868322 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0619061943733841 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 2.0000004699365936, dim: 0.0} + -0.6715991871662127 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.9999998523554716, dim: 0.0} + -0.8281984426740736 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999996385700907, dim: 0.0} + 0.7655415560862905 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.017372129185037218}}\n", + "fitness error is 2.8519042231922027, while loss addition is 7.150912097131368e-06\n", + "During MO : processing 3-th weight.\n", + "solving equation:\n", + "1.5063477960295286 * t{power: 1.0, dim: 0.0} + 0.014275703787707261 * du/dx0{power: 1.0} + -0.001065520408571676 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + -3.9995813938090787 * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + -1.0012699907415628 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999998834214243, dim: 0.0} + -0.028420128297692453 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0005168896046652416}}\n", + "Step = 1000 loss = 0.000054.\n", + "fitness error is 1.743549072639443, while loss addition is 9.569640496920329e-06\n", + "solving equation:\n", + "1.949068582257032 * t{power: 1.0, dim: 0.0} + -1.6919512617164298 * du/dx0{power: 1.0} + 0.2427115982282783 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.7464354915235373 * u{power: 1.0} * du/dx0{power: 1.0} + 0.14571404888952136 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + -0.8587283141650728 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -1.54773030089138 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0011671682675720738}}\n", + "Step = 1000 loss = 
0.000442.\n", + "Step = 2000 loss = 0.001450.\n", + "fitness error is 5.43111995839105, while loss addition is 0.0013140111695975065\n", + "solving equation:\n", + "-0.41047757267267465 * d^2u/dx0^2{power: 1.0} + 0.3815840607381645 * t{power: 1.0, dim: 0.0} + -1.4824892672201468 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999995479788957, dim: 0.0} + -0.08858854479452181 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.07964480648697952 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 2.0000004699365936, dim: 0.0} + 1.1810717767559125 * du/dx0{power: 1.0} + -0.4197029276755594 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.000784171200013208}}\n", + "Step = 1000 loss = 0.000137.\n", + "fitness error is 5.162289832946507, while loss addition is 0.006068309303373098\n", + "solving equation:\n", + "1.0376917809354074 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.2665663228894467 * t{power: 1.0, dim: 0.0} + 0.27568837861733 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999996385700907, dim: 0.0} + 1.7785701076564475 * u{power: 1.0} * cos{power: 1.0, freq: 1.9999997048669242, dim: 0.0} + -0.31013926210022774 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + -1.5649912870525737 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999997078252758, dim: 0.0} + 0.7046649868816606 = d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000003685360266, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.01689233235791751}}\n", + "Step = 1000 loss = 0.006030.\n", + "Step = 2000 loss = 0.000093.\n", + "fitness error is 0.8512913237619719, while loss addition is 0.002585913520306349\n", + "During MO : processing 4-th weight.\n", + "solving equation:\n", + "-6.120771249411868 * u{power: 1.0} + 0.32922204367559993 * d^2u/dx0^2{power: 1.0} + 5.820379525477624 * du/dx0{power: 1.0} + 4.202697191542786 * t{power: 1.0, dim: 0.0} + -0.860237471628435 * u{power: 1.0} * du/dx0{power: 1.0} + -8.183840881656351 * u{power: 1.0} * cos{power: 1.0, freq: 1.9999996899765908, dim: 0.0} + -4.135249962886135 = t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0011692386418670356}}\n", + "Step = 1000 loss = 0.038179.\n", + "fitness error is 6.861662772644996, while loss addition is 8.428142791672144e-06\n", + "solving equation:\n", + "-1.8671779746283095 * u{power: 1.0} * sin{power: 1.0, freq: 2.000000284493596, dim: 0.0} + -0.05743204100303978 * d^2u/dx0^2{power: 1.0} + 0.2840726152878821 * du/dx0{power: 1.0} + 0.17093817852239374 * t{power: 1.0, dim: 0.0} + 0.8322648629346842 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999996385700907, dim: 0.0} + -0.016629988087008427 * u{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.8805734235198669 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999997078252758, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 
0.016088130792824153}}\n", + "Step = 1000 loss = 0.000104.\n", + "Step = 2000 loss = 0.000087.\n", + "fitness error is 0.6501430078089683, while loss addition is 0.0022454294376075268\n", + "solving equation:\n", + "1.2788189686632159 * du/dx0{power: 1.0} + 0.43145434959036977 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999996702539322, dim: 0.0} + -3.4679121118160836 * u{power: 1.0} + 0.5624766482750556 * t{power: 1.0, dim: 0.0} + 0.3014429263856118 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -2.0259373120748814 * u{power: 1.0} * cos{power: 1.0, freq: 2.0000000909284537, dim: 0.0} + 1.6941828998646702 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.00033370522044635584}}\n", + "Step = 1000 loss = 0.006073.\n", + "fitness error is 2.082608902205271, while loss addition is 0.006904528476297855\n", + "solving equation:\n", + "-0.7356725108988328 * du/dx0{power: 1.0} + -7.390991573211792 * t{power: 1.0, dim: 0.0} + 18.193502492755137 * u{power: 1.0} + 5.839098327942282 * d^2u/dx0^2{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.9999997611103542, dim: 0.0} + -0.3440740396342888 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + -2.3708158916183133 = d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.006595247875635322}}\n", + "Step = 1000 loss = 0.039491.\n", + "fitness error is 3.515511943100501, while loss addition is 0.03942855820059776\n", + "During MO : processing 5-th weight.\n", + "solving equation:\n", + "0.026717177572366854 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 2.00000015773781, dim: 0.0} + 0.01857963023131423 * du/dx0{power: 1.0} + -3.9838059988778656 * u{power: 1.0} + 1.4981247977422745 * t{power: 1.0, dim: 0.0} + 0.0024827301734697458 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -0.9921448586506346 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004281605723, dim: 0.0} + 0.027488995713033072 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.000608224058084266}}\n", + "fitness error is 1.709826925275033, while loss addition is 7.750620170554612e-06\n", + "solving equation:\n", + "1.310036702372271 * t{power: 1.0, dim: 0.0} + 0.48723217982088973 * du/dx0{power: 1.0} + -1.419926376082331 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.000000127052496, dim: 0.0} + -16.242336658450462 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999997902638036, dim: 0.0} + -4.865351969908554 * du/dx0{power: 1.0} * u{power: 1.0} + 1.3336455153032452 * u{power: 1.0} + 1.437592284854535 = t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.001396693626220075}}\n", + "Step = 1000 loss = 0.024736.\n", + "fitness error is 2.744274814911794, while loss addition is 0.046552278101444244\n", + "solving equation:\n", + "-0.7206797172942113 * t{power: 1.0, dim: 0.0} + -1.7064220763248499 * u{power: 1.0} * 
sin{power: 1.0, freq: 2.0000000228937833, dim: 0.0} + -0.21355667491262034 * d^2u/dx0^2{power: 1.0} + 0.08827000519208449 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.09078739187716757 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.6521181144756035 * u{power: 1.0} + 2.145444078646929 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0020555978292859465}}\n", + "Step = 1000 loss = 0.001138.\n", + "fitness error is 0.8740231250185997, while loss addition is 0.0007054670131765306\n", + "solving equation:\n", + "-0.009530100186852176 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.0000004298362026, dim: 0.0} + 1.462405852030637 * t{power: 1.0, dim: 0.0} + -3.8909027435038523 * u{power: 1.0} + -0.9769816121995973 * d^2u/dx0^2{power: 1.0} + -0.003722150095954235 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + -0.014966134044542731 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + -0.030511709323362755 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999998834214243, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.001683384758693324}}\n", + "fitness error is 1.6880767868812345, while loss addition is 8.609381438873243e-06\n", + "During MO : processing 6-th weight.\n", + "solving equation:\n", + "28.60971956112206 * t{power: 1.0, dim: 0.0} + 0.17056034500620498 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + -20.169757544648043 * u{power: 1.0} + -6.0392988169769515 * u{power: 1.0} * t{power: 1.0, dim: 0.0} + 4.622012095886113 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + 6.773151796205527 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.000000061561971, dim: 0.0} + -34.84456588798815 = t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0003778812198361923}}\n", + "Step = 1000 loss = 2.414371.\n", + "fitness error is 14.050176676433631, while loss addition is 3.3937103748321533\n", + "solving equation:\n", + "-1.5721359482277675 * u{power: 1.0} + 0.274306009223865 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + -0.34566702957668893 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000002868930804, dim: 0.0} + 0.024583852097631913 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -1.7986177725166155 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004281605723, dim: 0.0} + -1.0340474936243462 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.000000277490798, dim: 0.0} + 0.1001750732425323 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0013732284234355616}}\n", + "Step = 1000 loss = 0.010104.\n", + "fitness error is 2.545616923099089, while loss addition is 0.005657621659338474\n", + "solving equation:\n", + "0.5242281237777024 * du/dx0{power: 1.0} + 8.694570307107378 * u{power: 1.0} + 3.400844602078797 * d^2u/dx0^2{power: 1.0} + 0.5902601632889802 * t{power: 
1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000001492729074, dim: 0.0} + -1.0160670107411542 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} + -10.365996130848762 = u{power: 1.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.006122265748361167}}\n", + "Step = 1000 loss = 0.065703.\n", + "Step = 2000 loss = 0.018139.\n", + "fitness error is 4.9060663328494485, while loss addition is 0.02263115718960762\n", + "solving equation:\n", + "-0.36966224493848976 * du/dx0{power: 1.0} * u{power: 1.0} + 1.3971286051809113 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000048118826, dim: 0.0} + -0.8107612559780644 * t{power: 1.0, dim: 0.0} + 0.4111377633392642 * d^2u/dx0^2{power: 1.0} + 2.000444968147317 * u{power: 1.0} + -1.7279883395064397 * u{power: 1.0} * sin{power: 1.0, freq: 2.0000000854961413, dim: 0.0} + 0.5482398033432795 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0007823957253709687}}\n", + "Step = 1000 loss = 0.000522.\n", + "fitness error is 1.1932885354358855, while loss addition is 0.0007614530040882528\n", + "During MO : processing 7-th weight.\n", + "solving equation:\n", + "-0.010947588719233514 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + -0.9518689076102879 * d^2u/dx0^2{power: 1.0} + -3.8177099384321482 * u{power: 1.0} + 0.008248464812801504 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 1.4302549238526354 * t{power: 1.0, dim: 0.0} + -0.03256203869354507 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.000000481491419, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.007174983559006738}}\n", + "fitness error is 1.7977665851209463, while loss addition is 6.617110102524748e-06\n", + "solving equation:\n", + "1.3344440030569156 * t{power: 1.0, dim: 0.0} + 0.7917256530262717 * d^2u/dx0^2{power: 1.0} + -0.47679008219726693 * du/dx0{power: 1.0} + 1.4267344752695326 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000001350025194, dim: 0.0} + 4.1036733336384055 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000048118826, dim: 0.0} + -0.3146545603697152 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -5.010389258436589 = u{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0006866500145718357}}\n", + "fitness error is 0.8845356489720293, while loss addition is 9.025308827403933e-06\n", + "solving equation:\n", + "-0.3685802811773473 * u{power: 1.0} + -0.628967532435663 * du/dx0{power: 1.0} + -2.324980347344142 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999995859869577, dim: 0.0} + 0.4963829079553718 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + -0.18063692779110732 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + -1.612255531670757 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999998003347839, dim: 0.0} + 4.545553500115826 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 
'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0002772385550746484}}\n", + "fitness error is 3.4222936561392294, while loss addition is 7.232359166664537e-06\n", + "solving equation:\n", + "14.509169967115893 * u{power: 1.0} + 0.20925980901251773 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 3.630951688512192 * d^2u/dx0^2{power: 1.0} + -5.728666441720111 * t{power: 1.0, dim: 0.0} + 0.0217427164968037 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 3.653320724646101 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004281605723, dim: 0.0} + 1.1819791457877347 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.000830485192215008}}\n", + "Step = 1000 loss = 0.040852.\n", + "fitness error is 3.4783193768635297, while loss addition is 0.007884914986789227\n", "Multiobjective optimization : 1-th epoch.\n", "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", + "solving equation:\n", + "1.8978464640071755 * t{power: 1.0, dim: 0.0} + 0.7885206016244524 * du/dx0{power: 1.0} + -4.344285340633521 * u{power: 1.0} + 0.5331758093219751 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999998578789744, dim: 0.0} + 0.11407661281402014 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.08603624931963601 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -0.20154886153524837 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0013713388246746725}}\n", + "fitness error is 1.2115072375183733, while loss addition is 6.154523362056352e-06\n", + "solving equation:\n", + "-0.4494504765321946 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.9999998450163115, dim: 0.0} + -0.7823315016615169 * u{power: 1.0} + -0.49276475200299863 * d^2u/dx0^2{power: 1.0} + -1.537956758530447 * u{power: 1.0} * sin{power: 1.0, freq: 2.000000320180692, dim: 0.0} + -0.16477303260951554 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -1.2697517327875207 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004281605723, dim: 0.0} + 0.49988803201743753 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0013679714575261224}}\n", + "Step = 1000 loss = 0.000766.\n", + "fitness error is 3.0869200028034083, while loss addition is 0.0015128232771530747\n", + "solving equation:\n", + "-0.9687886224611026 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999997116937474, dim: 0.0} + 0.03682088166237806 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000002108013364, dim: 0.0} + -0.00922116190222014 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + -3.9902118097196686 * u{power: 1.0} + 1.499628066073614 * t{power: 1.0, dim: 0.0} + 0.008373205429794531 * du/dx0{power: 1.0} * u{power: 1.0} + 0.07121318448337395 = 
d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0012036906369755512}}\n", + "Step = 1000 loss = 0.000934.\n", + "fitness error is 2.2439969202425845, while loss addition is 0.002781837945804\n", + "solving equation:\n", + "-0.7778652548442361 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.0 * d^2u/dx0^2{power: 1.0} + 0.8912584885209403 * t{power: 1.0, dim: 0.0} + -0.41675323099317707 * du/dx0{power: 1.0} + -3.287507732600167 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.00000013677268, dim: 0.0} + 1.2101330922043 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + 1.40329018203285 = d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0047889316577861725}}\n", + "Step = 1000 loss = 0.004043.\n", + "fitness error is 1.4890215727072977, while loss addition is 0.016163766384124756\n", + "During MO : processing 1-th weight.\n", + "solving equation:\n", + "0.09682890695124487 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + 0.8025546651942528 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.999999839228841, dim: 0.0} + 0.0 * d^2u/dx0^2{power: 1.0} + -0.0923078237554494 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -0.6114501632148958 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + -0.16547531888225747 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.9506289472535694 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0047523112464458}}\n", + "Step = 1000 loss = 0.001055.\n", + "fitness error is 2.9467792186896813, while loss addition is 0.0010257740505039692\n", + "solving equation:\n", + "12.146943517611332 * t{power: 1.0, dim: 0.0} + -33.207621447667634 * u{power: 1.0} + -8.339380955176646 * d^2u/dx0^2{power: 1.0} + 0.032038790816267794 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -8.727732616134007 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999996156992814, dim: 0.0} + 0.5253607033614404 * du/dx0{power: 1.0} * u{power: 1.0} + 0.9421753480189913 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 5.059894543132286e-05}}\n", + "Step = 1000 loss = 0.053803.\n", + "fitness error is 5.098399656320402, while loss addition is 0.07270368933677673\n", + "solving equation:\n", + "7.693172010654292 * u{power: 1.0} + -0.5312302815099572 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + -0.38288880332088854 * du/dx0{power: 1.0} + -3.177870679295475 * t{power: 1.0, dim: 0.0} + 0.059621803354922884 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + 1.9449601141320612 * d^2u/dx0^2{power: 1.0} + -1.9744766415584154 = d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.9999999805077424, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 
0.005690113962285937}}\n", + "Step = 1000 loss = 0.006921.\n", + "fitness error is 2.076990155763728, while loss addition is 0.02377759851515293\n", + "solving equation:\n", + "5.189098131145329 * u{power: 1.0} + 1.3939962265597416 * d^2u/dx0^2{power: 1.0} + 0.27064431409865025 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.19056623817143606 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + -0.7253692854364283 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.6837512322264829 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004281605723, dim: 0.0} + -2.980895654865253 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0008707030470582973}}\n", + "Step = 1000 loss = 0.010121.\n", + "Step = 2000 loss = 0.009008.\n", + "fitness error is 14.70272361385181, while loss addition is 0.009507441893219948\n", + "During MO : processing 2-th weight.\n", + "solving equation:\n", + "0.4923788430504797 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.9999997646848908, dim: 0.0} + 0.8454743932744108 * t{power: 1.0, dim: 0.0} + 0.1977931908732724 * du/dx0{power: 1.0} * u{power: 1.0} + -0.3369028978793398 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0999413287925007 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000112995997, dim: 0.0} + 0.023428348155272682 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.7674724049432388 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004175287327549248}}\n", + "Step = 1000 loss = 0.000361.\n", + "Step = 2000 loss = 0.000553.\n", + "fitness error is 1.0260748025325706, while loss addition is 7.075176836224273e-05\n", + "solving equation:\n", + "-3.837613001402724 * u{power: 1.0} + 0.02369820808689438 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + -1.8942308811133814 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000450024561, dim: 0.0} + 0.9036819630005608 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.9999996139274525, dim: 0.0} + 1.5251323546256994 * t{power: 1.0, dim: 0.0} + -0.015898307549122716 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 1.3229517923809568 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0011822242066788814}}\n", + "Step = 1000 loss = 0.000674.\n", + "fitness error is 1.4966736319667517, while loss addition is 8.618889296485577e-06\n", + "solving equation:\n", + "-2.9767194976247677 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000254888724, dim: 0.0} + 3.0516714936985463 * du/dx0{power: 1.0} + 0.6233350922409261 * u{power: 1.0} + 2.546818724343718 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000004329786445, dim: 0.0} + -6.191625692971576 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.99999979960678, dim: 0.0} + 1.1758160439346879 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 2.5660513524153727 = t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 
'value': 0.00015663574789717694}}\n", + "Step = 1000 loss = 0.031217.\n", + "Step = 2000 loss = 0.059604.\n", + "fitness error is 2.184230634633652, while loss addition is 0.07814682275056839\n", + "solving equation:\n", + "0.11727714470825246 * du/dx0{power: 1.0} + 1.523882550620707 * t{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.00393576881243699 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -3.750905729959632 * u{power: 1.0} + 0.443609672729384 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999996465664467, dim: 0.0} + 1.04393141203941 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.005854920856540219}}\n", + "fitness error is 1.2708880314236073, while loss addition is 9.18058412935352e-06\n", + "During MO : processing 3-th weight.\n", + "solving equation:\n", + "2.105773241573329 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000002834763464, dim: 0.0} + 4.289188104783296 * du/dx0{power: 1.0} + 0.8721948155779293 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.5197358341602993 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -2.228619402342589 * u{power: 1.0} + 0.6688171429045976 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 2.0000003485375664, dim: 0.0} + 0.3368492219670672 = du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.006061688372406351}}\n", + "Step = 1000 loss = 0.002828.\n", + "Step = 2000 loss = 0.003309.\n", + "fitness error is 10.21602714056106, while loss addition is 0.003108642529696226\n", + "solving equation:\n", + "-0.03787431549900731 * du/dx0{power: 1.0} + 1.453480708960131 * t{power: 1.0, dim: 0.0} + 0.0 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -3.908081033530488 * u{power: 1.0} + 0.025482526571223744 * du/dx0{power: 1.0} * u{power: 1.0} + -0.9783532810817748 * d^2u/dx0^2{power: 1.0} + 0.02548824407763965 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999998471636156, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0035262143966587178}}\n", + "fitness error is 1.4813035476651675, while loss addition is 7.2924212872749195e-06\n", + "solving equation:\n", + "1.2790595361601642 * u{power: 1.0} + 1.811993183428446 * d^2u/dx0^2{power: 1.0} + 0.8105914053184785 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -3.0345480524863846 * u{power: 1.0} * cos{power: 1.0, freq: 1.999999688807925, dim: 0.0} + 1.3896605785759648 * du/dx0{power: 1.0} + 0.14667338041744424 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 1.0631229982992323 = t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0013572343368889186}}\n", + "Step = 1000 loss = 0.000259.\n", + "Step = 2000 loss = 0.003089.\n", + "fitness error is 6.083036415767325, while loss addition is 0.049870334565639496\n", + "solving equation:\n", + "1.215770780102745 * u{power: 1.0} + 1.3138261782612761 * 
d^2u/dx0^2{power: 1.0} + 0.9350526622850562 * du/dx0{power: 1.0} + 0.6923718403861888 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -4.459374768464559 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000450024561, dim: 0.0} + -2.305988615379072 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999997255203816, dim: 0.0} + 2.4746441555096808 = d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0017809855254236773}}\n", + "Step = 1000 loss = 0.000622.\n", + "fitness error is 3.5145888567572316, while loss addition is 0.006005854811519384\n", + "During MO : processing 4-th weight.\n", + "solving equation:\n", + "-0.21331080654450113 * du/dx0{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} + -0.48092651507694495 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 2.390766674269713 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000002257602776, dim: 0.0} + 1.2151437852602813 * d^2u/dx0^2{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.000000301333256, dim: 0.0} + 2.725913826729674 = d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0062908259576039535}}\n", + "Step = 1000 loss = 0.005984.\n", + "fitness error is 2.0979123691158654, while loss addition is 0.010404510423541069\n", + "solving equation:\n", + "0.3820618773726327 * du/dx0{power: 1.0} + 0.1601009189367539 * t{power: 1.0, dim: 0.0} + 0.7974278049945632 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.99999990183825, dim: 0.0} + -0.0903193218507012 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.5546964450214267 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 1.0410472157500739 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004091428058018362}}\n", + "Step = 1000 loss = 0.002353.\n", + "Step = 2000 loss = 0.000613.\n", + "fitness error is 5.3041180554783445, while loss addition is 0.0038391067646443844\n", + "solving equation:\n", + "7.334829150950631 * t{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 2.3232915318149168 * d^2u/dx0^2{power: 1.0} + -2.0440220281822192 * u{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.0 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.000000061561971, dim: 0.0} + -13.97497610789702 = t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.017392127647788753}}\n", + "Step = 1000 loss = 0.000963.\n", + "fitness error is 3.77231906239145, while loss addition is 0.00196690927259624\n", + "solving equation:\n", + "2.449128465951414 * t{power: 1.0, dim: 0.0} + 0.8874324690937683 * du/dx0{power: 1.0} + -5.781975324860738 * u{power: 1.0} + 0.3955974273422095 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + 0.04416277550384576 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.7327991274779098 * 
u{power: 1.0} * sin{power: 1.0, freq: 1.9999997127492546, dim: 0.0} + -0.6313837927155906 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0008542869387281683}}\n", + "Step = 1000 loss = 0.001919.\n", + "fitness error is 1.421012610086767, while loss addition is 0.00024982556351460516\n", + "During MO : processing 5-th weight.\n", + "solving equation:\n", + "-0.7601001643577879 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000003476617767, dim: 0.0} + -0.5543446961974006 * u{power: 1.0} + -2.0287806599720017 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004281605723, dim: 0.0} + 0.0 * d^2u/dx0^2{power: 1.0} + -0.2265967277112389 * t{power: 1.0, dim: 0.0} + 0.3806281108599728 * du/dx0{power: 1.0} * u{power: 1.0} + -1.092747609969355 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0010020894550411105}}\n", + "Step = 1000 loss = 0.000545.\n", + "Step = 2000 loss = 0.000507.\n", + "fitness error is 2.404446085976575, while loss addition is 0.002682725666090846\n", + "solving equation:\n", + "0.17830119711696224 * du/dx0{power: 1.0} + 0.6289748391501976 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.7539798283464584 * t{power: 1.0, dim: 0.0} + -6.836871952491815 * u{power: 1.0} * cos{power: 1.0, freq: 2.0000002906006036, dim: 0.0} + 0.06393811599379871 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.26174174505548714 * u{power: 1.0} * du/dx0{power: 1.0} + 1.7345503999681444 = d^2u/dx0^2{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004860729001575576}}\n", + "Step = 1000 loss = 0.021007.\n", + "Step = 2000 loss = 0.001130.\n", + "fitness error is 4.80615168200843, while loss addition is 0.002698279218748212\n", + "solving equation:\n", + "0.04946818642145084 * t{power: 1.0, dim: 0.0} + -0.30689214306635737 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.7254156087076735 * d^2u/dx0^2{power: 1.0} + 2.0373307228528965 * u{power: 1.0} + 1.8369774332963527 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000450024561, dim: 0.0} + 0.30445235578084845 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999999980275416, dim: 0.0} + -2.092749455024746 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.000898971317062309}}\n", + "fitness error is 1.527976562997835, while loss addition is 7.689585800108034e-06\n", + "solving equation:\n", + "-0.010007311999421948 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 1.4511197309710573 * t{power: 1.0, dim: 0.0} + -0.9760609337928273 * d^2u/dx0^2{power: 1.0} + -3.872987866371835 * u{power: 1.0} + 0.0116227138351733 * du/dx0{power: 1.0} + 0.005256946962252276 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -0.0072779768633095656 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.999999526209329, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 
'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.005683852696069273}}\n", + "fitness error is 1.7340593743775312, while loss addition is 5.997173957439372e-06\n", + "During MO : processing 6-th weight.\n", + "solving equation:\n", + "0.08817038384075873 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + 0.01723804500041437 * u{power: 1.0} + -0.06248439338219003 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.09663696760562723 * u{power: 1.0} * du/dx0{power: 1.0} + 0.2598438585561368 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000001350025194, dim: 0.0} + 1.4056384134553233 * u{power: 1.0} * cos{power: 1.0, freq: 1.999999788064778, dim: 0.0} + -0.10495006577862644 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0003096326312548763}}\n", + "Step = 1000 loss = 0.001451.\n", + "fitness error is 2.2339817978535126, while loss addition is 0.0004419142205733806\n", + "solving equation:\n", + "0.2827781326672692 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + -3.377629608997605 * t{power: 1.0, dim: 0.0} + 0.9213958947389522 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.11924144143240796 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 1.0143253531951586 * u{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} + 5.481767152262648 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004839539739976827}}\n", + "Step = 1000 loss = 0.000580.\n", + "fitness error is 3.8377067995179788, while loss addition is 0.003544933395460248\n", + "solving equation:\n", + "0.3285493264362154 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.9999998154531378, dim: 0.0} + 0.5351719063110927 * du/dx0{power: 1.0} + -0.16234882390112335 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -0.15953872096866595 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 0.43747685913321827 * d^2u/dx0^2{power: 1.0} + 0.8589868474523983 * u{power: 1.0} + -0.5775135209067954 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000003264246966, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.00395922237169901}}\n", + "Step = 1000 loss = 0.000055.\n", + "fitness error is 2.1589674181505853, while loss addition is 8.293280188809149e-06\n", + "solving equation:\n", + "-2.3455183091782854 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 2.0000001104328136, dim: 0.0} + -0.09576205035526042 * du/dx0{power: 1.0} + -0.9729165461656646 * d^2u/dx0^2{power: 1.0} + 0.8968435980442311 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.19887685832252672 * u{power: 1.0} + 0.015129237226390315 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + -1.6911866496002435 = d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0004860203403595218}}\n", + "Step = 1000 loss = 0.000327.\n", + "Step = 2000 loss = 0.004289.\n", + "fitness error is 3.6527134160673365, while loss addition 
is 0.0033078440465033054\n", + "During MO : processing 7-th weight.\n", + "solving equation:\n", + "-0.5827857652120042 * t{power: 1.0, dim: 0.0} + 0.36432327729849745 * d^2u/dx0^2{power: 1.0} + -0.511461816329238 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000001823838702, dim: 0.0} + 0.3565322106603333 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 1.559666148857421 * du/dx0{power: 1.0} + 0.8340162769269718 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000003722137363, dim: 0.0} + -1.6541357423859566 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.003939913165569571}}\n", + "Step = 1000 loss = 0.000651.\n", + "Step = 2000 loss = 0.003101.\n", + "fitness error is 3.623119469815269, while loss addition is 0.0023406464606523514\n", + "solving equation:\n", + "0.10939709487052095 * t{power: 1.0, dim: 0.0} + -0.9660251997651769 * u{power: 1.0} + 0.5251083106170451 * du/dx0{power: 1.0} + 0.6735222316237681 * du/dx0{power: 1.0} * u{power: 1.0} + -0.9161324579627949 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000001350025194, dim: 0.0} + -2.9143537729537865 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000048118826, dim: 0.0} + 2.530725209206709 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.002495178274877232}}\n", + "fitness error is 2.0017312905135265, while loss addition is 7.64220931159798e-06\n", + "solving equation:\n", + "-0.009972693912105573 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.00448461990571869 * u{power: 1.0} * du/dx0{power: 1.0} + -3.820006226027542 * u{power: 1.0} + -0.9524787012304935 * d^2u/dx0^2{power: 1.0} + 1.4286361202476345 * t{power: 1.0, dim: 0.0} + -0.022951399721563 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.000000447788768, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0011864706733477409}}\n", + "fitness error is 1.661296671704447, while loss addition is 9.244716238754336e-06\n", + "solving equation:\n", + "0.0 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.4424421526680569 * du/dx0{power: 1.0} + -0.006869049750744044 * t{power: 1.0, dim: 0.0} + 0.27529137798095743 * u{power: 1.0} + -0.052245462137656726 * du/dx0{power: 1.0} * u{power: 1.0} + 0.46548591860714295 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.999999849825895, dim: 0.0} + 1.029520740865443 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999995846282665, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.005547490287492612}}\n", + "Step = 1000 loss = 0.000070.\n", + "fitness error is 1.2863691536940614, while loss addition is 9.949922969099134e-06\n", "Multiobjective optimization : 2-th epoch.\n", "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th 
weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", + "solving equation:\n", + "2.7498350778859524 * du/dx0{power: 1.0} + 3.224055900160561 * t{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + -0.9418751071868845 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -3.9658324398586733 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999997078252758, dim: 0.0} + 0.8976472392237774 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999996385700907, dim: 0.0} + -3.7356402207156623 = du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.016374938553726792}}\n", + "Step = 1000 loss = 0.003747.\n", + "fitness error is 6.687707051017465, while loss addition is 0.005435504950582981\n", + "solving equation:\n", + "2.9998788165763655 * du/dx0{power: 1.0} + 0.23478501187798173 * t{power: 1.0, dim: 0.0} + 2.625006038544751 * u{power: 1.0} + 0.7821855742293224 * d^2u/dx0^2{power: 1.0} + -2.1092987195153166 * u{power: 1.0} * sin{power: 1.0, freq: 2.0000003305900997, dim: 0.0} + -0.15858306825885074 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + -3.965241757335329 = du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.00323739370649087}}\n", + "Step = 1000 loss = 0.000463.\n", + "fitness error is 6.018995213269232, while loss addition is 0.0010737928096204996\n", + "solving equation:\n", + "-1.187335824038408 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.000000060093103, dim: 0.0} + 2.3451212781305673 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.06312267379892497 * t{power: 1.0, dim: 0.0} + 0.17836533458265638 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -1.5321549834043553 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000003770934733, dim: 0.0} + -0.17753227484037368 = du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0022069977782633926}}\n", + "Step = 1000 loss = 0.000216.\n", + "Step = 2000 loss = 0.000095.\n", + "fitness error is 2.2065156286672054, while loss addition is 0.0003554362338036299\n", + "solving equation:\n", + "0.14149977489586643 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.9999996899243855, dim: 0.0} + -0.3202034884733938 * d^2u/dx0^2{power: 1.0} + -0.5517548476325119 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999999266588773, dim: 0.0} + -0.31013256651380233 * t{power: 1.0, dim: 0.0} + 0.06886451294622925 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.47215683406159664 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004281605723, dim: 0.0} + 1.3453338999762423 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 2.319000220848814e-05}}\n", + "Step = 1000 loss = 0.005894.\n", + "Step = 2000 loss = 0.003582.\n", + "fitness error is 4.985400768600565, while loss 
addition is 0.007434476166963577\n", + "During MO : processing 1-th weight.\n", + "solving equation:\n", + "-0.025941684252380453 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.3847486624469948 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000001350025194, dim: 0.0} + -0.4841053850032254 * t{power: 1.0, dim: 0.0} + 0.46846754549943076 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + -0.15021095561868664 * du/dx0{power: 1.0} + -3.3337122404893735 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000048118826, dim: 0.0} + 3.2339565910465664 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.000411820172904111}}\n", + "Step = 1000 loss = 0.000934.\n", + "fitness error is 3.0241274122722337, while loss addition is 0.00012937499559484422\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} + 0.11727780774626949 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.0 * d^2u/dx0^2{power: 1.0} + 0.0 * u{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.210859166439677 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999995275587583, dim: 0.0} + 0.6457248409308829 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999997704468366, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02838174361535579}}\n", + "Step = 1000 loss = 0.016757.\n", + "Step = 2000 loss = 0.011211.\n", + "Step = 3000 loss = 0.013055.\n", + "fitness error is 13.79234720915771, while loss addition is 0.011635851114988327\n", + "solving equation:\n", + "0.6221930413893145 * du/dx0{power: 1.0} + -3.4820328479173828 * u{power: 1.0} + 0.48517405410859776 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.33759727263592937 * du/dx0{power: 1.0} * u{power: 1.0} + 0.13735548194329394 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + 2.373898081356812 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.002722022327740832}}\n", + "Step = 1000 loss = 0.012093.\n", + "Step = 2000 loss = 0.012675.\n", + "fitness error is 5.348660305387506, while loss addition is 0.007700991351157427\n", + "solving equation:\n", + "0.0 * t{power: 1.0, dim: 0.0} + -0.3634914201521433 * u{power: 1.0} + 1.8384506093024933 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999995846282665, dim: 0.0} + 0.6816580560457949 * u{power: 1.0} * cos{power: 1.0, freq: 2.0000001321110035, dim: 0.0} + 0.2058175065818296 * d^2u/dx0^2{power: 1.0} + -0.7927128636415707 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.999999849825895, dim: 0.0} + -2.135714295795234 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.003913764311915317}}\n", + "Step = 1000 loss = 0.000153.\n", + "fitness error is 1.4691553489562275, while loss addition is 7.870785339036956e-06\n", + "During MO : processing 2-th weight.\n", + "solving equation:\n", + "1.7323402199410702 * t{power: 1.0, dim: 0.0} + 
-4.75059773237802 * u{power: 1.0} + -1.199650272854585 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.9999998549352818, dim: 0.0} + 2.624133402574268 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000001350025194, dim: 0.0} + -0.7055599763523918 * du/dx0{power: 1.0} + -0.013196093601260787 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -2.614087518501041 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0002395023465059362}}\n", + "Step = 1000 loss = 0.000374.\n", + "fitness error is 3.6326508635578856, while loss addition is 0.026040375232696533\n", + "solving equation:\n", + "-0.08127429944659816 * t{power: 1.0, dim: 0.0} + 0.017650945295709597 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + 0.12139923019598467 * u{power: 1.0} + 0.5990350973007952 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.000000271232848, dim: 0.0} + -0.10842272845071448 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + -1.3160347738023035 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999997127492546, dim: 0.0} + 1.2020475753073312 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0003899405016932693}}\n", + "Step = 1000 loss = 0.004547.\n", + "fitness error is 3.850160960658751, while loss addition is 0.001543428166769445\n", + "solving equation:\n", + "-0.6810979969145601 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000000483916525, dim: 0.0} + 0.40910776157369577 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999999980275416, dim: 0.0} + -1.0633593502606706 * t{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999999232057692, dim: 0.0} + 0.14439469963781845 * d^2u/dx0^2{power: 1.0} + 2.0465987423101937 * u{power: 1.0} + 0.8030698709415968 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.002637395863644038}}\n", + "Step = 1000 loss = 0.000710.\n", + "Step = 2000 loss = 0.001773.\n", + "fitness error is 2.1926215690966324, while loss addition is 0.0025793297681957483\n", + "solving equation:\n", + "-0.05563465970996921 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -1.0657698136114975 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000112995997, dim: 0.0} + 0.0 * t{power: 1.0, dim: 0.0} + 0.4065689057805203 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.24701147843546606 * du/dx0{power: 1.0} * u{power: 1.0} + 2.321171030396608 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.027962111495207945}}\n", + "Step = 1000 loss = 0.000884.\n", + "Step = 2000 loss = 0.001033.\n", + "fitness error is 2.4618778680997737, while loss addition is 0.00014989588817115873\n", + "During MO : processing 3-th weight.\n", + "solving equation:\n", + "0.9404744771016792 * t{power: 1.0, dim: 0.0} + -2.658239592313123 * u{power: 1.0} + 0.08211473206909321 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999995846282665, dim: 0.0} + 
0.08915673666720458 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.12434900625335188 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999996000464901, dim: 0.0} + 0.6604946098375556 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.006946011649327583}}\n", + "Step = 1000 loss = 0.083790.\n", + "Step = 2000 loss = 0.003073.\n", + "fitness error is 3.468365911409627, while loss addition is 0.0011288834502920508\n", + "solving equation:\n", + "-0.38548965930856066 * t{power: 1.0, dim: 0.0} + 0.06498641195642185 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + -1.9551062351258373 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999998846902969, dim: 0.0} + 2.3473153594952727 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000001350025194, dim: 0.0} + 0.07876904277865979 * d^2u/dx0^2{power: 1.0} + 4.275231023316801 * u{power: 1.0} * sin{power: 1.0, freq: 1.999999763548462, dim: 0.0} + -1.783554018655267 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0011418234979572225}}\n", + "Step = 1000 loss = 0.001522.\n", + "fitness error is 0.8905223201022254, while loss addition is 0.0007218177197501063\n", + "solving equation:\n", + "0.0 * t{power: 1.0, dim: 0.0} + -0.04388592598469041 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.32724068652300964 * d^2u/dx0^2{power: 1.0} + 0.3869613038630843 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000112995997, dim: 0.0} + 0.6233773426467398 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.00000048246694, dim: 0.0} + 0.0 * u{power: 1.0} + 0.43156389895828406 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02985264522386271}}\n", + "Step = 1000 loss = 0.001088.\n", + "fitness error is 1.9849719243893662, while loss addition is 9.224326277035289e-06\n", + "solving equation:\n", + "0.19683620869982665 * t{power: 1.0, dim: 0.0} + -0.3267506346080111 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.7165965866472707 * d^2u/dx0^2{power: 1.0} + 0.833951946179605 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.9999999182925214, dim: 0.0} + -0.4081357909290395 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999995814631037, dim: 0.0} + 1.9116469566069243 * u{power: 1.0} + -1.9264089978649748 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0006393341418334604}}\n", + "Step = 1000 loss = 0.012721.\n", + "fitness error is 2.3227913040770267, while loss addition is 0.01141292043030262\n", + "During MO : processing 4-th weight.\n", + "solving equation:\n", + "-0.17697121241327612 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.9999996710571666, dim: 0.0} + -2.684075808946146 * u{power: 1.0} + 0.720241277646614 * du/dx0{power: 1.0} + 0.24137436622598113 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000001350025194, dim: 0.0} + -1.3491960490008472 * u{power: 1.0} * cos{power: 1.0, freq: 
2.000000048118826, dim: 0.0} + 1.068321123921808 * t{power: 1.0, dim: 0.0} + 0.1635849602143189 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.00031528425914882066}}\n", + "Step = 1000 loss = 0.001660.\n", + "fitness error is 2.282731357121816, while loss addition is 0.001987621420994401\n", + "solving equation:\n", + "-1.0891212048311039 * du/dx0{power: 1.0} + -0.39846851641729036 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + 0.45212936516128605 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + -2.5190374816727177 * u{power: 1.0} * sin{power: 1.0, freq: 2.0000003587971684, dim: 0.0} + -0.6639241766163285 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000450024561, dim: 0.0} + 0.012165232202501089 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 1.2279277938462614 = d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 2.000000154152356, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0001776918858380096}}\n", + "fitness error is 2.2182017570084858, while loss addition is 8.509236067766324e-06\n", + "solving equation:\n", + "-3.235276540643523 * t{power: 1.0, dim: 0.0} + 5.201933283769383 * d^2u/dx0^2{power: 1.0} + -4.341070774299407 * du/dx0{power: 1.0} + 2.1551380609894077 * u{power: 1.0} * du/dx0{power: 1.0} + -1.8730540052718632 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 19.510477051010238 * u{power: 1.0} + -9.505192299247884 = u{power: 1.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.002489864600794304}}\n", + "Step = 1000 loss = 1.086857.\n", + "fitness error is 7.642521953884448, while loss addition is 1.9093413352966309\n", + "solving equation:\n", + "-0.3737238231110011 * t{power: 1.0, dim: 0.0} + -1.029044305090177 * d^2u/dx0^2{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.9158976250246094 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + 0.5680268974862891 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000112995997, dim: 0.0} + -2.568403135163201 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.999999608238559, dim: 0.0} + -0.7214824708747328 = du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.027004157847296338}}\n", + "Step = 1000 loss = 0.007128.\n", + "fitness error is 4.114044776211813, while loss addition is 0.0063855526968836784\n", + "During MO : processing 5-th weight.\n", + "solving equation:\n", + "-0.19818982431699145 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.000000296478369, dim: 0.0} + 0.0 * u{power: 1.0} + 1.031627049489758 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000002141992277, dim: 0.0} + -0.5140234673503367 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.07092502060590909 * du/dx0{power: 1.0} + 0.007472873270492455 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.033155739051069714 = d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 2.0000004417776056, dim: 0.0}\n", + "{'terms_number': 
{'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0018519023564962643}}\n", + "Step = 1000 loss = 0.000377.\n", + "Step = 2000 loss = 0.001215.\n", + "fitness error is 1.987421125715551, while loss addition is 0.00020611558284144849\n", + "solving equation:\n", + "-3.18524559548122 * u{power: 1.0} + -1.3919190937637789 * du/dx0{power: 1.0} + 0.1407574307494104 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + -1.3153198385270382 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.999999553186753, dim: 0.0} + 0.0966022474919395 * t{power: 1.0, dim: 0.0} + -7.513345723491784 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004281605723, dim: 0.0} + -3.3191942421505067 = d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0021028348444138297}}\n", + "Step = 1000 loss = 0.040846.\n", + "fitness error is 8.383658686137824, while loss addition is 8.836170309223235e-06\n", + "solving equation:\n", + "0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 1.4179666720003519 * t{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -3.5455565651083054 * u{power: 1.0} + 0.4596290629765684 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999996134725897, dim: 0.0} + -0.08565713624332735 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999996385700907, dim: 0.0} + 1.1924911670521539 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.016624029601797586}}\n", + "Step = 1000 loss = 0.004722.\n", + "fitness error is 2.1965593091696616, while loss addition is 0.006798572838306427\n", + "solving equation:\n", + "-0.621407246626848 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + -4.2988730494228475 * t{power: 1.0, dim: 0.0} + 10.693667630997034 * u{power: 1.0} + 2.973932718022726 * d^2u/dx0^2{power: 1.0} + 0.022646619339366784 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -1.1125261470759513 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 2.0000002967400965, dim: 0.0} + -2.592419750821484 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.003379516649781616}}\n", + "Step = 1000 loss = 0.007179.\n", + "fitness error is 2.645060459877798, while loss addition is 0.034959305077791214\n", + "During MO : processing 6-th weight.\n", + "solving equation:\n", + "-0.10018293644521399 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -0.18390899413231832 * d^2u/dx0^2{power: 1.0} + 0.8312410243722314 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.0000000247081706, dim: 0.0} + -0.5766769880951693 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + -1.3259791679063309 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000048118826, dim: 0.0} + -0.06309856254103531 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 1.4495220221928775 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': 
[0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0013141699640828214}}\n", + "Step = 1000 loss = 0.000808.\n", + "Step = 2000 loss = 0.009760.\n", + "fitness error is 9.036736364925252, while loss addition is 0.0041170367039740086\n", + "solving equation:\n", + "-0.6141821601301154 * t{power: 1.0, dim: 0.0} + 0.06952186639211777 * d^2u/dx0^2{power: 1.0} + 0.18367623379803236 * u{power: 1.0} * du/dx0{power: 1.0} + 0.03288266754820393 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + 0.1354818483268833 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.7282001620379782 * u{power: 1.0} + 1.2089720883710835 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0006190553466603488}}\n", + "Step = 1000 loss = 0.001993.\n", + "fitness error is 5.066713126744537, while loss addition is 9.884795872494578e-06\n", + "solving equation:\n", + "1.2731277030831725 * du/dx0{power: 1.0} + 0.5195222673040657 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + -0.2124642676114611 * u{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999995158100616, dim: 0.0} + -7.3754227263764305 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000450024561, dim: 0.0} + 0.05043503659464181 * du/dx0{power: 1.0} * u{power: 1.0} + -0.6733266608612885 = u{power: 1.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0009512351170187724}}\n", + "Step = 1000 loss = 0.020213.\n", + "fitness error is 2.746340857180312, while loss addition is 0.00723910890519619\n", + "solving equation:\n", + "0.12887828607602111 * du/dx0{power: 1.0} + 1.5234741180142213 * t{power: 1.0, dim: 0.0} + 0.0 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -3.739026583114698 * u{power: 1.0} + 0.4523041325503298 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000004904018502, dim: 0.0} + -0.042862939843628094 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000003659480967, dim: 0.0} + 1.0714668965715426 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.002855858419425347}}\n", + "Step = 1000 loss = 0.001184.\n", + "fitness error is 1.951356714886067, while loss addition is 0.0017730711260810494\n", + "During MO : processing 7-th weight.\n", + "solving equation:\n", + "1.4899066636166822 * t{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -3.964064491734732 * u{power: 1.0} + -0.0067970957948739665 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.000000431035131, dim: 0.0} + -1.0011891144654992 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999998471636156, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + -0.0158441980780844 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.003599804302340236}}\n", + "Step = 1000 loss = 0.000146.\n", + "fitness error is 1.6443338642866574, while loss addition is 7.298514901776798e-06\n", + "solving equation:\n", + "1.4418079861419508 * t{power: 1.0, 
dim: 0.0} + 0.5134479130183895 * du/dx0{power: 1.0} + -4.172605290308439 * u{power: 1.0} + 0.14196979888623806 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + -0.09513702119621877 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.12758348528294172 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.38959623361877016 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0003470066285396656}}\n", + "Step = 1000 loss = 0.000023.\n", + "fitness error is 2.0941763057731526, while loss addition is 9.920119737216737e-06\n", + "solving equation:\n", + "-0.2330362595218965 * t{power: 1.0, dim: 0.0} + 0.2091904247265883 * du/dx0{power: 1.0} * u{power: 1.0} + 0.6375460083156494 * d^2u/dx0^2{power: 1.0} + 1.1735746485583813 * u{power: 1.0} * cos{power: 1.0, freq: 1.9999998333815485, dim: 0.0} + -0.26947943290784815 * u{power: 1.0} * t{power: 1.0, dim: 0.0} + 2.255817675692839 * u{power: 1.0} + -1.2369778613411944 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0017735213143435396}}\n", + "Step = 1000 loss = 0.002087.\n", + "fitness error is 3.302659724202644, while loss addition is 0.004163678269833326\n", + "solving equation:\n", + "0.9491687783934961 * t{power: 1.0, dim: 0.0} + 0.3392975253413595 * d^2u/dx0^2{power: 1.0} + 1.494775893440897 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999999468202594, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -2.3237835173776538 * u{power: 1.0} + 0.8570289577530461 * u{power: 1.0} * du/dx0{power: 1.0} + -2.2007142622843117 = d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.999999894138392, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.00019256024210495396}}\n", + "Step = 1000 loss = 0.008699.\n", + "fitness error is 1.6507672116932524, while loss addition is 0.0009186765528284013\n", "Multiobjective optimization : 3-th epoch.\n", "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 4-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 5-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 6-th epoch.\n", - "During MO : processing 0-th 
weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 7-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 8-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 9-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 10-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 11-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 12-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 13-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 14-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 15-th epoch.\n", - "During MO : processing 0-th weight.\n", 
- "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 16-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 17-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 18-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 19-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 20-th epoch.\n", - "During MO : processing 0-th weight.\n" + "solving equation:\n", + "-2.999042831017728 * du/dx0{power: 1.0} * u{power: 1.0} + 1.0710383697114583 * d^2u/dx0^2{power: 1.0} + 5.596347563127775 * du/dx0{power: 1.0} + 0.4426408042310938 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} + 1.1330846113868076 * u{power: 1.0} + -4.9135091412980065 = du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.015063454732520033}}\n", + "Step = 1000 loss = 0.036300.\n", + "Step = 2000 loss = 0.000288.\n", + "fitness error is 5.729538849740193, while loss addition is 0.0011933732312172651\n", + "solving equation:\n", + "0.421052380730633 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + -0.08322908518370492 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + 1.46812736694803 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000001350025194, dim: 0.0} + 0.16374920821868683 * d^2u/dx0^2{power: 1.0} + -0.7757284687885762 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000048118826, dim: 0.0} + -0.023630376521834473 * t{power: 1.0, dim: 0.0} + -2.4903516512616837 = d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999999077299973, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0004859169690900973}}\n", + "Step = 1000 loss = 
0.000176.\n", + "fitness error is 3.8465294554083536, while loss addition is 9.586306987330317e-06\n", + "solving equation:\n", + "-0.11456664909066015 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + -0.8236149599790481 * t{power: 1.0, dim: 0.0} + 1.704647158796211 * u{power: 1.0} + 0.15620777557899512 * d^2u/dx0^2{power: 1.0} + -0.7972133104172048 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + 0.142506500245818 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + 1.1778211525607738 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0005592640471560996}}\n", + "Step = 1000 loss = 0.001944.\n", + "Step = 2000 loss = 0.052664.\n", + "fitness error is 2.127584821051608, while loss addition is 0.001265546539798379\n", + "solving equation:\n", + "-0.21374202022215388 * du/dx0{power: 1.0} + -0.11244429932774758 * t{power: 1.0, dim: 0.0} + -0.37207685626890624 * u{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} + -2.106767220101035 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999998471636156, dim: 0.0} + 0.008388034690337673 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + -2.3291146748750062 = d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999999462866673, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0034345959803861476}}\n", + "Step = 1000 loss = 0.000105.\n", + "fitness error is 1.2362588337718774, while loss addition is 0.011949693784117699\n", + "During MO : processing 1-th weight.\n", + "solving equation:\n", + "-0.3011239794038745 * t{power: 1.0, dim: 0.0} + 0.194787902048936 * d^2u/dx0^2{power: 1.0} + 0.16524550016618558 * u{power: 1.0} * du/dx0{power: 1.0} + 0.3874056024084903 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 2.0000003943579157, dim: 0.0} + 1.2639104271950725 * u{power: 1.0} + -0.07483947785415947 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.9331324876414214 = du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000001350025194, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.002572428241311516}}\n", + "Step = 1000 loss = 0.000043.\n", + "Step = 2000 loss = 0.000212.\n", + "fitness error is 3.3835968799601144, while loss addition is 0.00025158797507174313\n", + "solving equation:\n", + "1.9544864485260698 * du/dx0{power: 1.0} + 0.578048620495696 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -1.39086750637608 * d^2u/dx0^2{power: 1.0} + -3.828817260280972 * u{power: 1.0} + -4.198359289896555 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000450024561, dim: 0.0} + -0.2121171690044779 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 1.7253177455967852 = d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.9999998896060969, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0005558675438906982}}\n", + "Step = 1000 loss = 0.009503.\n", + "fitness error is 3.1615403276236957, while loss addition is 0.0024076011031866074\n", + "solving equation:\n", + 
"0.0842289103142219 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -0.2571157265615504 * t{power: 1.0, dim: 0.0} + 0.48475315001885155 * du/dx0{power: 1.0} * u{power: 1.0} + -0.0699560971194956 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.9999997315090372, dim: 0.0} + 0.0 * d^2u/dx0^2{power: 1.0} + 0.0 * u{power: 1.0} + 0.8984548431737787 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02821363499979169}}\n", + "Step = 1000 loss = 0.001168.\n", + "Step = 2000 loss = 0.000044.\n", + "fitness error is 5.663359250362306, while loss addition is 0.0013711602659896016\n", + "solving equation:\n", + "0.11438056801454469 * du/dx0{power: 1.0} + 1.524953933882853 * t{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999995846282665, dim: 0.0} + -3.7507399536268964 * u{power: 1.0} + -0.019709850819626038 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.999999849825895, dim: 0.0} + 0.4508555513794651 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999999546800966, dim: 0.0} + 1.0205475340560148 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.006676665701918895}}\n", + "fitness error is 1.292504654612506, while loss addition is 7.203732366178883e-06\n", + "During MO : processing 2-th weight.\n", + "solving equation:\n", + "1.195266540039882 * t{power: 1.0, dim: 0.0} + -0.41301002723299995 * d^2u/dx0^2{power: 1.0} + 0.32244438884741167 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999999980275416, dim: 0.0} + 1.5373534177701198 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000450024561, dim: 0.0} + -1.1463014508649636 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000004770770934, dim: 0.0} + -3.3498240378947024 * u{power: 1.0} + -0.5812867434793757 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0003685818997836353}}\n", + "Step = 1000 loss = 0.000349.\n", + "fitness error is 1.737852522411991, while loss addition is 0.0001415482984157279\n", + "solving equation:\n", + "-3.3997788661224417 * t{power: 1.0, dim: 0.0} + 8.725983299009915 * u{power: 1.0} + -0.508730729845459 * du/dx0{power: 1.0} + 2.262946391343607 * d^2u/dx0^2{power: 1.0} + -0.07579073200270635 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.1391286668210761 * u{power: 1.0} * du/dx0{power: 1.0} + -2.4168880438948466 = d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999998670996337, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0015363088781690803}}\n", + "Step = 1000 loss = 0.001594.\n", + "fitness error is 3.0221421877006454, while loss addition is 0.003710658522322774\n", + "solving equation:\n", + "-0.6182888675838 * t{power: 1.0, dim: 0.0} + 2.221293228964115 * du/dx0{power: 1.0} + 0.5116941366743346 * d^2u/dx0^2{power: 1.0} + 0.48406631193745187 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 1.131591357296359 * du/dx0{power: 1.0} * u{power: 1.0} + 
-1.2349757839168645 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999999185109145, dim: 0.0} + -0.17325181830694888 = du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.028280580423791998}}\n", + "Step = 1000 loss = 0.000581.\n", + "fitness error is 6.9237182654419716, while loss addition is 0.0012985985958948731\n", + "solving equation:\n", + "-0.41265161949869544 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.9364801940294942 * t{power: 1.0, dim: 0.0} + -0.5875876066133855 * d^2u/dx0^2{power: 1.0} + 0.573341342788803 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + -0.6871069160166258 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + -6.418140822837025 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999997127492546, dim: 0.0} + 0.5184631552319121 = du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0022698087440382883}}\n", + "Step = 1000 loss = 0.013139.\n", + "fitness error is 3.291277038991713, while loss addition is 0.003070780076086521\n", + "During MO : processing 3-th weight.\n", + "solving equation:\n", + "1.5722531749556583 * t{power: 1.0, dim: 0.0} + 0.480257072012096 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999996208800932, dim: 0.0} + 0.11538118373694763 * du/dx0{power: 1.0} + -0.015798027172236318 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -3.8582086765236183 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 1.0516340320028084 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004385620043509257}}\n", + "fitness error is 1.275264246970528, while loss addition is 7.110092155926395e-06\n", + "solving equation:\n", + "0.20237518054348916 * t{power: 1.0, dim: 0.0} + -0.6346921910866566 * du/dx0{power: 1.0} * u{power: 1.0} + 1.265293750160836 * du/dx0{power: 1.0} + 0.3872223661225953 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000001350025194, dim: 0.0} + -0.274347021908405 * d^2u/dx0^2{power: 1.0} + 0.24890983301488231 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -2.1183191337754774 = d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.0000000469142845, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0022700549153133213}}\n", + "Step = 1000 loss = 0.002843.\n", + "Step = 2000 loss = 0.001876.\n", + "fitness error is 4.7995077376154045, while loss addition is 0.004395042546093464\n", + "solving equation:\n", + "0.04793494592245225 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.2799451101349 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -1.0931348903396219 * t{power: 1.0, dim: 0.0} + -0.30821383860225865 * d^2u/dx0^2{power: 1.0} + -0.3942583182267982 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.000000164347668, dim: 0.0} + -0.7401136955079444 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999995346810528, dim: 0.0} + 2.947053350525596 = 
du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0030469686750136684}}\n", + "Step = 1000 loss = 0.000604.\n", + "fitness error is 2.264139744944713, while loss addition is 0.0020833290182054043\n", + "solving equation:\n", + "-0.03288654697888663 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 0.08916842109971229 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -0.16925606215287603 * t{power: 1.0, dim: 0.0} + 0.0 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 2.000000236255651, dim: 0.0} + 0.0 * d^2u/dx0^2{power: 1.0} + 0.49746621867404694 * du/dx0{power: 1.0} * u{power: 1.0} + 0.885971163505032 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.029322397602360276}}\n", + "Step = 1000 loss = 0.003245.\n", + "Step = 2000 loss = 0.000572.\n", + "fitness error is 6.766683697953267, while loss addition is 0.00298174936324358\n", + "During MO : processing 4-th weight.\n", + "solving equation:\n", + "0.2862398823590704 * t{power: 1.0, dim: 0.0} + -0.6419228344202788 * d^2u/dx0^2{power: 1.0} + 0.7078489669228775 * du/dx0{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000004846212955, dim: 0.0} + 0.23318550363699383 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000004751069107, dim: 0.0} + -2.2358896318675465 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999997127492546, dim: 0.0} + 0.11945598732593093 = du/dx0{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0036932978505943917}}\n", + "fitness error is 2.621468758789186, while loss addition is 9.676155968918465e-06\n", + "solving equation:\n", + "1.916987238707764 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999996142329444, dim: 0.0} + 0.2459298601204578 * d^2u/dx0^2{power: 1.0} + -0.47672287358849214 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.08445821782655057 * du/dx0{power: 1.0} * u{power: 1.0} + -1.8171287176746491 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.9999997315090372, dim: 0.0} + 0.38311773581115793 = du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02858776663104846}}\n", + "Step = 1000 loss = 0.003353.\n", + "Step = 2000 loss = 0.004667.\n", + "fitness error is 2.0377494421008904, while loss addition is 0.00022436899598687887\n", + "solving equation:\n", + "1.2788182831091905 * du/dx0{power: 1.0} + 0.5624780989428501 * t{power: 1.0, dim: 0.0} + 0.3014422055159586 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -3.4679106009919285 * u{power: 1.0} + -2.025937433404775 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000450024561, dim: 0.0} + 0.4314509235975629 * u{power: 1.0} * sin{power: 1.0, freq: 2.000000496860147, dim: 0.0} + 1.6941799388950793 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, 
('sparsity', 'u'): {'optimizable': True, 'value': 6.899801761884184e-05}}\n", + "fitness error is 1.5738934273051708, while loss addition is 8.662251275382005e-06\n", + "solving equation:\n", + "14.902370729396724 * t{power: 1.0, dim: 0.0} + 4.665112328196182 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.9999997644464207, dim: 0.0} + 0.8121590921730483 * u{power: 1.0} * du/dx0{power: 1.0} + -5.560507930192908 * du/dx0{power: 1.0} + 2.509683976288756 * d^2u/dx0^2{power: 1.0} + -4.784863949637351 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -22.49689964990922 = t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0025581786634845958}}\n", + "Step = 1000 loss = 0.010852.\n", + "fitness error is 2.3867086446400227, while loss addition is 0.02583261951804161\n", + "During MO : processing 5-th weight.\n", + "solving equation:\n", + "-1.331751952575388 * u{power: 1.0} + -0.7106772706723111 * du/dx0{power: 1.0} + 0.5662413680527985 * du/dx0{power: 1.0} * u{power: 1.0} + 0.3163331250491388 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -0.9852562356267118 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.0000004751069107, dim: 0.0} + -0.37218166350427495 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999998471636156, dim: 0.0} + 3.5078239913280593 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0037559449453366442}}\n", + "Step = 1000 loss = 0.000196.\n", + "fitness error is 4.092182547852194, while loss addition is 0.0028794724494218826\n", + "solving equation:\n", + "0.7242499017288763 * t{power: 1.0, dim: 0.0} + 1.2565154616760918 * d^2u/dx0^2{power: 1.0} + 1.967160444396128 * du/dx0{power: 1.0} + 0.0657209075435542 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.20581420441097867 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 4.212818948592087 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000285877669, dim: 0.0} + -5.326658811040809 = t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.003720353457511248}}\n", + "Step = 1000 loss = 0.007004.\n", + "Step = 2000 loss = 0.010022.\n", + "fitness error is 5.092621827071974, while loss addition is 0.000636996584944427\n", + "solving equation:\n", + "22.414214557741598 * u{power: 1.0} + 0.0190685372019459 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + 5.875429731054028 * d^2u/dx0^2{power: 1.0} + -2.581526079488901 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999995737459444, dim: 0.0} + -9.07179710952913 * t{power: 1.0, dim: 0.0} + -0.03471543616952582 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -6.037663777189366 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.001976738952625546}}\n", + "Step = 1000 loss = 0.061911.\n", + "fitness error is 4.927834700263175, while loss addition is 0.03427521884441376\n", + "solving equation:\n", + "-0.06561725212395249 
* d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + -0.2432333329877731 * t{power: 1.0, dim: 0.0} + -0.2211317508265811 * d^2u/dx0^2{power: 1.0} + -1.0307184263445208 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000450024561, dim: 0.0} + -0.7590196553086702 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000000816050254, dim: 0.0} + 0.5414440729445341 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000001429428447, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.016641755659909118}}\n", + "Step = 1000 loss = 0.000687.\n", + "fitness error is 2.3404613522348745, while loss addition is 0.0002987610932905227\n", + "During MO : processing 6-th weight.\n", + "solving equation:\n", + "0.0 * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0} + -5.333610774379812 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000000869827925, dim: 0.0} + -2.482275895593486 * d^2u/dx0^2{power: 1.0} * cos{power: 1.0, freq: 1.9999995392877328, dim: 0.0} + 0.28573292099706604 * d^2u/dx0^2{power: 1.0} + -0.5562622702355199 * t{power: 1.0, dim: 0.0} + -6.019514739348394 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0067172404170881715}}\n", + "Step = 1000 loss = 0.012368.\n", + "fitness error is 1.22147037007329, while loss addition is 0.003890463151037693\n", + "solving equation:\n", + "-0.00627317638792714 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.5812398021698992 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000001931243037, dim: 0.0} + 0.0 * t{power: 1.0, dim: 0.0} + -0.2976662104716583 * u{power: 1.0} + 1.5356231748791798 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999995846282665, dim: 0.0} + 0.06925629690744162 * d^2u/dx0^2{power: 1.0} + -2.0935281952468503 = d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.999999849825895, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.001118686254680279}}\n", + "Step = 1000 loss = 0.000458.\n", + "Step = 2000 loss = 0.000842.\n", + "fitness error is 1.9219268796200484, while loss addition is 0.0010553320171311498\n", + "solving equation:\n", + "1.1579167313346388 * du/dx0{power: 1.0} + 1.9295183287964988 * t{power: 1.0, dim: 0.0} + -4.366721583992155 * u{power: 1.0} + 0.2221441284020523 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999998578789744, dim: 0.0} + 0.19463064024717852 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 1.3390441139436313 * u{power: 1.0} * sin{power: 1.0, freq: 2.0000004133995994, dim: 0.0} + -0.6354281611279635 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0010449017668881357}}\n", + "Step = 1000 loss = 0.000201.\n", + "fitness error is 2.3603735166176794, while loss addition is 0.0011686673387885094\n", + "solving equation:\n", + "-0.0032656122920383555 * t{power: 1.0, dim: 0.0} + 0.0 * d^2u/dx0^2{power: 1.0} + -2.9358386036046036 * u{power: 1.0} + 
0.8635047051760071 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 3.9595505876526453 * du/dx0{power: 1.0} + 0.0888961255378593 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -1.1299528614239334 = du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0017020534470675603}}\n",
+ "Step = 1000 loss = 0.051637.\n",
+ "fitness error is 10.906464049449546, while loss addition is 0.05276825651526451\n",
+ "During MO : processing 7-th weight.\n",
+ "solving equation:\n",
+ "-3.817719081216277 * u{power: 1.0} + 1.4302578521341467 * t{power: 1.0, dim: 0.0} + -0.9518712859682879 * d^2u/dx0^2{power: 1.0} + 0.008248062530848357 * du/dx0{power: 1.0} + -0.010947046287361407 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999997127492546, dim: 0.0} + -0.03255999607292324 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.999999910913633, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.00404085805746803}}\n",
+ "Step = 1000 loss = 0.001657.\n",
+ "fitness error is 1.8613983535442515, while loss addition is 9.833283911575563e-06\n",
+ "solving equation:\n",
+ "0.0 * du/dx0{power: 1.0} * t{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 4.116822766530647 * d^2u/dx0^2{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.4690149009546897 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.9999997315090372, dim: 0.0} + -1.02414850750991 = d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.02797158550324969}}\n",
+ "Step = 1000 loss = 0.015328.\n",
+ "Step = 2000 loss = 0.041789.\n",
+ "fitness error is 6.114410878115872, while loss addition is 0.007449643686413765\n",
+ "solving equation:\n",
+ "5.028138299035254 * t{power: 1.0, dim: 0.0} + -4.650906492659681 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.0000001769399742, dim: 0.0} + -3.296240155931756 * du/dx0{power: 1.0} * u{power: 1.0} + -1.3586219451960664 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + 4.5361049463410525 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 1.9999996333868326, dim: 0.0} + -1.9898989905208484 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999998471636156, dim: 0.0} + -12.644229044260477 = t{power: 1.0, dim: 0.0} * d^2u/dx0^2{power: 1.0}\n",
+ "{'terms_number': {'optimizable': False, 'value': 7}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.7, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.003251968550852142}}\n"
+ ]
+ },
+ {
+ "ename": "KeyboardInterrupt",
+ "evalue": "",
+ "output_type": "error",
+ "traceback": [
+ "---------------------------------------------------------------------------",
+ "KeyboardInterrupt                         Traceback (most recent call last)",
+ "Cell In[17], line 5\n      2 t_train = t[:t_max]; t_test = t[t_max:]\n      3 x_train = solution[:t_max, 0]; x_test = solution[t_max:, 0]\n----> 5 epde_search_obj = epde_discovery(t_train, x_train, 'poly')\n",
+ "Cell In[15], line 39, in epde_discovery(t, x, diff_mode, use_pretrained_nn)\n     36 else:\n     37     pretrained_nn = None\n---> 39 epde_search_obj.fit(data=[x,], variable_names=['u',], max_deriv_order=(2,),\n     40                     equation_terms_max_number=7, data_fun_pow = 1,\n     41                     additional_tokens=[trig_tokens, custom_grid_tokens],\n     42                     equation_factors_max_number=factors_max_number,\n     43                     eq_sparsity_interval=(1e-5, 1e-1), data_nn = pretrained_nn)\n     45 epde_search_obj.equations(only_print = True, num = 1)\n",
+ "File ~/Documents/EPDE/examples/../epde/interface/interface.py:826, in EpdeSearch.fit(self, data, equation_terms_max_number, equation_factors_max_number, variable_names, eq_sparsity_interval, derivs, max_deriv_order, additional_tokens, data_fun_pow, deriv_fun_pow, optimizer, pool, population, data_nn)\n--> 826 self.optimizer.optimize(**self.optimizer_exec_params)\n",
+ "File ~/Documents/EPDE/examples/../epde/optimizers/moeadd/moeadd.py:465, in MOEADDOptimizer.optimize(self, epochs)\n--> 465 self.sector_processer.run(population_subset = self.pareto_levels, EA_kwargs = sp_kwargs)\n",
+ "File ~/Documents/EPDE/examples/../epde/optimizers/moeadd/strategy_elems.py:19, in MOEADDSectorProcesser.run(self, population_subset, EA_kwargs)\n---> 19 self.linked_blocks.traversal(population_subset, EA_kwargs)\n",
+ "File ~/Documents/EPDE/examples/../epde/optimizers/blocks.py:201, in LinkedBlocks.traversal(self, input_obj, EA_kwargs)\n--> 201 vertex.apply(EA_kwargs)\n",
+ "File ~/Documents/EPDE/examples/../epde/optimizers/blocks.py:128, in EvolutionaryBlock.apply(self, EA_kwargs)\n--> 128 self.output = self._operator.apply(self.combinator([block.output for block in self._incoming]), arguments = kwargs)\n",
+ "File ~/Documents/EPDE/examples/../epde/operators/multiobjective/moeadd_specific.py:358, in OffspringUpdater.apply(self, objective, arguments)\n--> 358 self.suboperators['chromosome_fitness'].apply(objective = temp_offspring, arguments = subop_args['chromosome_fitness'])\n",
+ "File ~/Documents/EPDE/examples/../epde/operators/utils/operator_mappers.py:27, in OperatorCondition.apply(self, objective, arguments)\n---> 27 self._conditioned_operator.apply(objective, arguments)\n",
+ "File ~/Documents/EPDE/examples/../epde/operators/common/fitness.py:135, in SolverBasedFitness.apply(self, objective, arguments)\n--> 135 loss_add, solution = self.adapter.solve_epde_system(system = objective, grids = None, boundary_conditions = None)\n",
+ "File ~/Documents/EPDE/examples/../epde/interface/solver_integration.py:737, in SolverAdapter.solve_epde_system(self, system, grids, boundary_conditions, mode, data, use_cache, use_fourier, fourier_params, use_adaptive_lambdas)\n--> 737 return self.solve(equations=[form[1] for form in system_solver_forms], domain = domain, boundary_conditions = bconds_combined, mode = mode, use_cache = use_cache, use_fourier = use_fourier, fourier_params = fourier_params, use_adaptive_lambdas = use_adaptive_lambdas)\n",
+ "File ~/Documents/EPDE/examples/../epde/interface/solver_integration.py:769, in SolverAdapter.solve(self, equations, domain, boundary_conditions, mode, epochs, use_cache, use_fourier, fourier_params, use_adaptive_lambdas)\n--> 769 loss = model.train(optimizer, callbacks=callbacks, **self._training_params)\n",
+ "File ~/Documents/EPDE/examples/../epde/solver/model.py:162, in Model.train(self, optimizer, epochs, info_string_every, mixed_precision, save_model, model_name, callbacks)\n--> 162 self.optimizer.step(closure)\n",
+ "File ~/Documents/EPDE/.venv/lib/python3.12/site-packages/torch/optim/optimizer.py:391, in Optimizer.profile_hook_step.<locals>.wrapper(*args, **kwargs)\n--> 391 out = func(*args, **kwargs)\n",
+ "File ~/Documents/EPDE/.venv/lib/python3.12/site-packages/torch/optim/optimizer.py:76, in _use_grad_for_differentiable.<locals>._use_grad(self, *args, **kwargs)\n---> 76 ret = func(self, *args, **kwargs)\n",
+ "File ~/Documents/EPDE/.venv/lib/python3.12/site-packages/torch/optim/adam.py:148, in Adam.step(self, closure)\n--> 148 loss = closure()\n",
+ "File ~/Documents/EPDE/examples/../epde/solver/optimizers/closure.py:61, in Closure._closure(self)\n---> 61 loss.backward()\n",
+ "File ~/Documents/EPDE/.venv/lib/python3.12/site-packages/torch/_tensor.py:525, in Tensor.backward(self, gradient, retain_graph, create_graph, inputs)\n--> 525 torch.autograd.backward(self, gradient, retain_graph, create_graph, inputs=inputs)\n",
+ "File ~/Documents/EPDE/.venv/lib/python3.12/site-packages/torch/autograd/__init__.py:267, in backward(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\n--> 267 _engine_run_backward(tensors, grad_tensors_, retain_graph, create_graph, inputs, allow_unreachable=True, accumulate_grad=True)\n",
+ "File ~/Documents/EPDE/.venv/lib/python3.12/site-packages/torch/autograd/graph.py:744, in _engine_run_backward(t_outputs, *args, **kwargs)\n--> 744 return Variable._execution_engine.run_backward(t_outputs, *args, **kwargs)\n",
+ "KeyboardInterrupt: "
+ ]
+ }
+ ],
+ "source": [
+ "t_max = 160\n",
+ "t_train = t[:t_max]; t_test = t[t_max:]\n",
+ "x_train = solution[:t_max, 0]; x_test = solution[t_max:, 0]\n",
+ "\n",
+ "epde_search_obj = epde_discovery(t_train, x_train, 'poly')"
+ ]
+ },
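+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "inspect-results",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Added for clarity: a minimal sketch, not part of the recorded run (the cell id\n",
+ "# 'inspect-results' is ours). Once the interrupted fit above is re-run to completion,\n",
+ "# the Pareto frontier of candidate equations can be printed, exactly as the\n",
+ "# docstring inside epde_discovery suggests.\n",
+ "epde_search_obj.equations(only_print = True, num = 1)"
+ ]
+ },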
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "id": "d2d363bf",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} \\cdot cos^{1.0}(2.0 x_{0.0}) = -2.182\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} + 1.285\\cdot 10^{-1} \\frac{\\partial ^2u}{\\partial x_0^2} + -2.431\\cdot 10^{-1} t + -2.018\\frac{\\partial u}{\\partial x_0} \\cdot sin^{1.0}(2.0 x_{0.0}) + -2.368 \\end{eqnarray*}$\n",
+ "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} = 1.921\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} + 1.646t + -4.162u + 4.317\\cdot 10^{-1} \\end{eqnarray*}$\n",
+ "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} = 8.346\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} + -2.665u + 1.301\\cdot 10^{-2} t \\cdot sin^{1.0}(2.0 x_{0.0}) + 1.077t + -1.361u \\cdot cos^{1.0}(2.0 x_{0.0}) + 5.495\\cdot 10^{-1} \\end{eqnarray*}$\n",
+ "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} = 1.525\\cdot 10^{-1} \\frac{\\partial ^2u}{\\partial x_0^2} \\cdot u + -4.877\\cdot 10^{-2} t + 1.868u \\cdot cos^{1.0}(2.0 x_{0.0}) + 2.103\\cdot 10^{-1} \\frac{\\partial ^2u}{\\partial x_0^2} + 1.821\\cdot 10^{-1} \\end{eqnarray*}$\n",
+ "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} = 7.183\\cdot 10^{-1} u \\cdot cos^{1.0}(2.0 x_{0.0}) + 1.543u + -7.668\\cdot 10^{-1} t + 2.597\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} \\cdot u + 6.02\\cdot 10^{-1} \\frac{\\partial ^2u}{\\partial x_0^2} + 2.753\\cdot 10^{-1} \\end{eqnarray*}$\n"
+ ]
+ },
+ {
+ "data": {
+ "image/png": "<base64-encoded Matplotlib PNG figure omitted>",
EDVN3jYaDdVqNfm+P7Ajp1gsKpvNRhoIr776atjomuY7R4mTHml4WSgWiwPnXO7v76/scRtnkqeL4/JvkHHHcFwZ8H1fruvKcRyZpinbtmdqEFMGBlv0U+ZZz2XHcfqGLdu2Ldu2w3TncjllMpnwM6cpr6OsQtnpnq8+iVU8vuPya9brx6ZY1O8Z186ijdWxiufOuLrP9309fPhQ77zzjgzD6AuIuS3nzsprr7Hr6+u2pPb19fXQfT744IP2t7/97fYHH3xwiylbTV/60peWnYSVUyqV2oVCIbLt6uqq3W63281ms51Op8PtyWSy7/35fL7dbDYj77UsK/yMdrvdrtfrbUmR/XoZhhF5T7vdbi/r9Bz0O1fBtHkbqFQqA3+bZVlDt836ndOkZ15lYVDZXlXZbHbi9wzLv15xjuGoMhB8V+8xmQfKQNQ05SCuWc/lq6urdqlU6jsOlmVF8rhQKLQNw+h7f9zyGtcyy06hUJjqWK3i8Y2bX9NcP9AxSzuLNlbHKp474+q+bDbbd9yr1Wrk/YM+cxjuYRaDYIFYC8ViMXwaEwTW6X6tXC6H/7r5vh95b/fTHNd1Va1WVa1WI595eHgo3/fDeXmO46hcLuv09LQvXa7r9vXS1mq1SDCf4PVh8QU8zwuHRg36/IDjOHIcR7lcTp7nqVwuy7btuQcOMk1TjUZjrp85L5PmbRyGYSiVSoWf0dvzvojvHCZuWRhnWNneVuOO4bgycJsoA4szy7l8eXmp4+Pjvu3VajXydO/x48dLW/5s0WXHdV0Vi0WdnZ1J6lx7Vylw3aKuvaOsUt0xrXHtpKAN1K27/RHsY9t2336ztrNoY92OaerGcXVfEBMhKCOSIq9vwrmzCZgagJUWDC2qVCoyTVOu6+rs7CwclptKpVQqlcLK4/DwUEdHR2F03e6hS57nybbtsCKyLGtggy2YHyVp5PBfz/OUSCQi2wzD0NXVVWRbUAEOq+CGXWQMwwgryHK5rOPjYxmGoWq1Ktu2ValUlEqlph6mOUwqlZLrumEeropp8jaO8/NzHR4eam9vT/l8XgcHB+FwwUV95zBxykIcw8r2NopzDEeVgcDl5aUSiYRarZaazebCAldRBhZjlnPZdd1Yeek4jnzfX9rQ1kWXnWB7MIx4lebyLuraO06cumPVDWsnBW0c0zSVSqXCYfWNRkOWZcn3fWUymbB9lk6ntbe3FxnGP0s7izbW7ZhHO6e37gvyPMg70zTDqQNBediEc2cT0BGAlWbbtk5OTsLK6OjoKDJ3O5lMRiqqo6OjSAVbq9XCRotpmnNtoAW92eME6wNP2mgKbjqC/w/e73leGDhnHoGnBn1v90V6kN7APcMcHh4uNMLxtHnbzTAM2batarWqYrEoy7LCBsGivnNS3WUBs+s9huPKQFCfBOd7uVwOG8C3hTIwf3HP5aCuH3ZzGATNCm6MVukGWZpv2Qka9qv2GweZx7V3nEmvH6toWDvJ8zxZlqVyuRy5aW21Wkomk2o2m7Isa2Gj5La5jbVsk9SNg+q+oCPAMIyw7BQKBd2/fz/sdNiEc2cT0BGw5XZ2dm7le9pDogsHw8kGVYq+76tcLkdeM7rW1i0Wi6rX65H3eJ4XWbO2UCgok8no4OBAyWSyb/9ZeJ43tsIKOjKmuRnuboh095jXarWBNyDBsLsg2M6gXvg4+5imqYuLi5Fpm0evreM4Y79Hkk5PTwf2nM+St72fk0qlVKlU5HmeMpmMDg8PB5bJcd85qjzPYp43gMs+5wd1IgWBm7qlUqmFRKgedAzHlYHexujx8bFyudzQ4aaLKAfz7gRY93JwW/VHnOXaDMMI9ymXy9rb29OTJ0+matCuetmp1WqxnmSuy/EdZJL8Gld3LPs8i1OehrWTgvZBqVSKjIAKtruuG2mLuK6ro6OjmX9LYFvbWOt07gyr+wLd5SEYgRGMsJqk7YUFWm6IgtkQLHAyqxhoo16vRwLFdKtWqwODLgXvG/SapHa9Xh+4v2ma7UqlMluCu1QqlXY+nx/5eqlUGvs5zWZzYNAaSZHAKsG+g353s9mMBJMZFHAlzj7tdiffFxmYZh7i5m3ve3qD9PTmSSCZTPaVlTjfOao8x01P3LKwSRYZLLD3Pb3HME4ZGFRvDKtr2u345YAyEHVb9U7c+qNer0eO8dXVVeTYXF1dtfP5fCTIVnD8BtUfccrrqpeddDo9cd0bWLXjO0l+zXr9WJZJrkmD2knD2hy950K7PVvZGIQ21sdW7dwZV/cF/98boNEwjHapVJrq3FnFe5hNQLBALFUymRw5vHbQsLBgGZ/euWOO4yiZTIY9nAcHB+HwpGQyqVwuN9chR8F8ukGC+VVBT6nv+0P3DYZZDnq9tze5d65q8D2u60Z+m2EYfcGO4uwTpLV7VMUguVwu1r/ewEHzMEnejjPsiUNvz3vc7xxXnseZpCxgMsOO4bgyEAx57D4mwRDYYcNWZykHlIHFmqT+aLVaYYC8YrEo27YlfRy81vM8FYvFyJPF7gCU01j1stNoNMKnfKsYhHJR195B4l4/lmlceRrXTnIcJwyS2d1eqNVqA9snx8fHajQac5kmsM1trGWYJE/H1X1BXIDe9/u+r6Ojo7U4d7YFHQGYqyDyay6XmyhK8SBBsJpuQfRRy7L6hu+VSqXIut/pdDrSUJ93NOdhF6lGo6FGo6FkMhneaJTL5bDjIqhAu52envZFrw0qY8dxlEqlJHXmqwWf4/t+WPE2m03t7++H708kEn0X4jj7BOkb14gtlUqx/s07PsA0eRsYNNzTsqyBjZZ6vR4OuRv3ndMaNvx0VFnAx4bl36AyMOoYjisDhmEon89H6pIgGvKsHYubVAbmWfcvyqT1h2VZyufz4b+gkZrP55VOp5VMJvvKxsXFhZLJZN+1Zt5TO5ZVdjzPW5kgZ73mfe3tNu31Y5XFaSc9fvw4jOzefc2rVquRchAE9jMMQ7VabS4PXba5jXXbJs3TOHVfoVCITFFwHEeWZYX7rPO5s0l22u0hE4vWwM3NjXZ3d3V9fa179+4N3OfZs2d666239PLLL+vu3bu3nMLV8ujRI7355psj9wnm7/QG4Yuj0WjI8zyl02n5vh8JCjIt13XDyikIHBNcfILXgotFNpvt68kOtFqthQQhSaVSkWAywe8eVPkHp1q5XFahUOibB1UsFsM8f/z4cTgnr9FohEvodM+rkj6e12bbtvb398OlXHK5XF+gvjj7SFImk9H5+fnKXaimzVvP88I5c41GQ/l8Xg8ePAjzzvd9nZ2daX9/P5zDFpSlON85qXHpkYaXhUWY5Zyfl1wuFzvuxLj86y0DcY7hqDIQvN49uuXp06czHZNVKwPSatT9k5SDSc1SN0sfz7t1HEf5fF6pVCqMnN5dNoIVJbqDZo071pNYdtnp/uxJf8OqHt9R+TXL9WNYOpdd33Yb105yHGfgfPdg1F93Z0AQFT54GjwPtLEUftaqnTvj6r7gfcHn9l43Jz134tzDYArLmpMwD8QImMy4+TXVarVdqVTaV1dXU81h6n2PaZpD59BuikK
hsBLzdkulUrtQKIR/p9PpvnTF2SfYju0w6zk/L6sek2LTrUrdTznYbNt+fFelvl0ntLE6KCvECFgUOgK2yKiTKAj80W1coJZxhgX62zSrcOPcbDYj6egOahQEaxm1T2BVLrpYvEWc89NqNpu3/p3oWKW6n3Kw2bb5+K5SfbtuaGNt97kToCNgMYgRAEmdYfa98zqD9WOnkcvldH5+Po+krbyTk5OlB00yTTNMR7lc1unpafja4eFhOK1i2D5SZ5jW06dPCUi2JeZ9zs9iFYbIbqtVqvspB5ttm4/vKtW364Y21nafO1gsYgRskWnm1+zs7Ojq6mqiuUxBhb1NAT+CFQvWubIuFovh3DZsr2nOeWwe6n5g8ahv46GNBWIELMYvLTsBWB3dEUJbrVbYa9lqtcKLVNDQq1arsm1bruuq2Wwql8vJNM1w+ZQgImiwjMim24SGLxeo7TOPcx7rj7ofWDzq2+nRxgIWg44ASOpEZi0UCn0RYKWPhySVy+UwomxwkapUKkqlUmo0GpI60VADvu9PHVkdwGLN45zf5obppqDuBxaP+hbAKqIjALJtW8lksm9t4FqtFpnLFKwRK3WW1AkuYt1Lu8y6XCCAxZvnOR88wRq0xBRWG3U/sHjUtwBW1nJjFc6GVQMmMyzipqSBSz0ZhtGuVCoD32MYRhgpFcB6mdc532w2I8saWZY113Risaj7gcWjvgVmx6oBi0GwwC0yKNCG67pKpVJ9wzgbjYYODw8HDu/0PE+Hh4cDnwBtem/17//+7+v6+nrZyQCm9otf/EI/+tGP9C//5b+MbP/Hf/xH/d3f/V3fdqlTj/7d3/2dfu3Xfi2y3fd9PXv2TL/yK78iSfrJT34iwzD0wgsvLO4HYC7mWQ4k6ec//7kk6cMPP9T/9//9f5QBQNS32D67u7t6/fXX5/65BAtcDKYGYOC8s7OzM5VKpYH7u64bucEP/vY8T9VqNXxfKpXauI6A6+trKiIAAACgx6NHj5adBEzguWUnYKt8+KH0138tffObnf9++OGyUyTLstRqtSLbgqf62Ww2si2VSknqzFcLIt/6vi/f9yUpjBodMAyjb91cAAAAAMByMSLgtrzxhvSVr0g//vHH2158Ufr616VXXlleuiTV63XZtq39/X1JnRv4SqUS2cc0TaVSKTmOo/Pzc9m23bdmdLPZDD9D6gS+CToJAAAAAACrgY6A2/DGG1I6LfXOt3/33c52x1lqZ4BpmioUCiP36Y14O2zaQK/e0QYAOhzHUavVUr1eVyaT2bhpNAAAdOO6B6wWOgIW7cMPOyMBBsVkbLelnR3pD/5A+t3fle7cufXkzdPBwUFkBEAQMHDb+b4v13WVTCbJjy006PgHa69ns1n5vq/79++z/BoAYO0Na/Nw3QNWDzECFu27341OB+jVbks/+lFnvzVnWZYeP34c/u153tb39rquGwmmGKwLjO0w7Pi3Wq1wbWjDMJRIJMJGEgAA62hUm4frHrB66AhYtJ/+dL77rTDTNHVyciLHcVQul3V6errsJC2V7/uqVqtKp9MyDEOWZSmVSsm27WUnDbdg1PG3LCsyvabVakWm3gAAsE7GtXm47gGrh46ARfvUp+a734pLp9NKp9PKZrNhEMFtFfSMd7MsS+VyeUkpwm2Ke/xzuZzOz89vM2kAAMzVJG0ernvAaqAjYNE+//nO6gA7O4Nf39mRXnqpsx82SjqdVr1ej2wzDCOy5CI2V5zjHyzLue2dZgCA9Ra3zcN1D1gdBAtctDt3OksEptOdm/7uoIFB58Cf/MnaBwrEYOVyWYlEQlJnGFwQM6HVaqlWq8n3fbVaLWWzWUmdoXUPHz7su5hiPY07/sHwyUajIcMwwsBKrutSNgAAa2XUNc8wDLmuy3UPWCF0BNyGV17pLBH4la9EAwe++GKnE2CJSwdicVKplAqFQmQOXHfgnEQiIdM0lUqlwoue67rhRRTrbdzxz2Qy4f/7vq/2P3USep5H2QAArJVR1zzTNOV5Htc9YMXQEXBbXnmls0Tgd7/bCQz4qU91pgPc4kiA3d1dPXr0aO6f22639d577+kTn/iEdoZNgdgQ7777bqz9bNtWMpnsC4RTq9XCaLqWZalYLEb2qVarSqVSc00zbt+442+a5tBlkygbAIB1Mu6aJ4nr3pZ49913F3Kvsbu7O/fPBB0Bt+vOHem3fmtpX//6668v5HOfPXumt956Sy+//LLu3r27kO9YFXErt2KxOHAom+d5Oj09DS+MFxcXKhQK4eu1Wi3SY471NO74j0LZAACsk1mueRLXvU3ymc98Rm+++eayk4GYdtrt7knr6+Xm5ka7u7u6vr7WvXv3Bu6zTTepy7JNefzo0aOxFZzrukqlUuo9tRqNhg4PD8Ptvu9rb28vst/Ozk7f+7BafN8PoyDn8/m+1+Me/3HfQdlAHI7jqNVqqV6vK5PJhA1qrJdx9YrUOdZSZ761aZoca9yaUeVzHte84Du47q2uOHWUFK+djNXBqgHAAgTBb7qdnZ1F1tD1PC+yX6PRiATNwWpyXVdPnz4duU+c4z8KZQNxNBoNSVI2m1WhUODJ2RobV694nheu0R4cb+C2jCufs17zJK57qy5O2wfrh44AYM4sy1Kr1YpsC57kBAFwpM6yOoZhhH9fXFyEc+M8z1t8QjGVdDqtg4ODoa/HPf6jUDYQR6vVUrValdQpM4lEIuwcwHoZV68E0dYDQQT2QdZ9edp1T/8mGlU+53HNk7jurbpxdRTWEzECgAWo1+uybVv7+/uSOhe4SqUS2cc0TR0dHalYLMo0TZ2cnOjs7EzlcnmiiydWT5zjPwplA3FYlhUZHt5qtfqCdUn9T9qwfprNZlifSJ1VZwbdMNu2vfajBcrlstLpNGV2jcx6zZO47gHLQEcAsACmacZqjPUOm5v0wjlvvu/r8vJSlUolfNI4iuu6KpVKSqVSMk1T1WpVDx48UDqdDvcpFouSOg1ZKfqb47x/3r+hWCyGTx183x85121acY//KKtWNpYtlUrFKpPlclm+78swDDWbTZ2enobHO5PJ6OTkRKZpRp48SZ1jNu71adxWeczlcjo/Px/42sHBwcLn2U5ad0ijj1XAtu3wKVQikYjUDXHev8l6n8KWy+XIEqXS6Pp3mNuoI0fJ5/PK5XITDSufxTR5NMt5Hbw3+E7P83R+fj5T2V32dW8e1zxpede9RdRf48rVNN85y29Y9nmN1URHAABJnfl4tVpNvu/3NTCH8X1fruvKcRyZpinbtiMN9d6nU7lcLnJDN+798/4NwYW5e43i7gZnsVgcOAduf3+fi2aP23zK7DhOrDmixWJR2Ww20th59dVXw8Zko9EIh6x2S6fTqlQqY1+f1G2VR8dxlEqlBp473fNsJzHJ8Z2m7hh3rHzf18OHD/XOO+/IMIy+wGPj3r9pDg4OIiMAgoCBAc/zVK/XI09Ox9W/g4wrk7clk8moWCxOXO9OWi9Nk0eznte2bcu27TCduVxOmUxm6ptBrnv9ll1/jStX03znLL9hG8sAYmqvsevr67ak9vX19d
B9Pvjgg/a3v/3t9gcffHCLKdsu25THX/rSl5adhIWrVCrtZDIZe9+rq6uBr11dXbUty4q8Xq/X25LazWZz7PtnMew3GIbR933TVIOlUqldKBSmTd5GyGazt/I9V1dX7VKpFOs4WZY1ctugY1YqlWK/Pq1FlsdqtdquVqvtdrtzfgXnVqBQKEx1rKZ5zyR1x7hjlc1m+45H8DvjvH8dDapXgvLRbDbb6XQ63N6bz/l8PnLs49S/g8yrjpyHuGWp2yTldto8Ckx7XluWFTnOhUKhbRhG7HTPOz1xrdN1b5n11yTlapLvjGMVysA2tJM3CcECASxMrVaLBPkJeuiXEQzK87xwGF+vSSISu66rarWqarU68OnxqgtGYATLAEmd43F4eLjEVA13eXmp4+PjWPsahqFUKhWWr96nQr1PzF3X1dHRUezX52ke5dHzPGUyGWUyGe3t7enw8DASZbtYLOrs7ExS54nPKgXcGnesgnninueF+dEdD2Hc+xdZzovFYjhKpbcOKBaLKpfL4b9uvu9H3tt9nIfVK4eHh/J9P5wzHfym3rXZXdftewI6af0bt0w6jiPHcZTL5eR5nsrlsmzbnnv5Mk1z4cEv532NipOH1Wo18pT18ePHC1sKkuveYoyrf2j7YF0wNQDATC4vL5VIJNRqtdRsNsPhcIZh6OrqKrJvcNHpvmAOe/+8DWukGoYx0cW5N0DbOvE8T4lEQqZpKpVKRYYJJhKJJaeun+u6E+X1+fm5Dg8Ptbe3p3w+r4ODg8iQ5t6h1J7nRT5/3OvzNI/yaJpm3zkWCMppMER11ebOjzpWQd4E0xpM0wyHTwfHY9z7F1HOg+kKlUpFpmnKdV2dnZ2FHUipVEqlUiksR4eHhzo6OgoDOHYPCfc8T7Zth79nWL0SzC+W+juqAsHv7Ra3/u39nEG6y2S5XNbx8bEMw1C1WpVt26pUKkqlUlNPQxkmlUrJdd2BATDnYZo8GmfS89pxHPm+v7ApLVz3FmNU/bOIcjULygBGoSMA2AC9AaKGOTw8nGv03aCBFlzcyuWyMpnM0EZNsK5wcFMy6fsXIeiE2AbBjW2xWIw0rqvVqlKp1BJTNljwJDRuY8UwDNm2rWq1qmKxKMuywpuWXoVCYeS853GvL8o8y2NwY7ZqnQDS6GMVNFwNwwjLaaFQ0P3798MG9rj3L6Kc27YdBpOUpKOjo8ic4GQyGWnoHx0dRW5ka7Va2DFjmubc6rngPBmnt/6Nq7tMJhKJ8P2e54XXnnkEOxv0vd0dIbdh2jwap/e8DgK7+b6vTCZz6+foNl33FmGSa420uHI1C8oAJDoCgJnt7Ozcyve0R0T9XsYNi9Tfu318fKxcLjdwGFrQiO7uiJjk/VLn6Ylt23NtHM77Qrjs8jAqj4Le/IuLi8jIi1qtpkwmM/S7BnU0BcGGug0LWOc4ji4uLoZ+fuD09DS8aZpmySjbtpVKpVSpVMJh84eHh315MW64cdzhyKteHmu1WqynqbMe32nEOVbd0zKCp1fBKJFR75+2nI86nr7vq1wuR14zutY9LxaLqtfrkfd4nhdZd7tQKCiTyejg4EDJZLJv/2l5njf2BmNQ/RtXd5nsPv61Wm1gZ0YwbDgIaDjoKWKcfUzTHFlvzLvczpJH4/Se14ZhhN9TLpe1t7enJ0+erOV1b9nXvFWtv7r3nbVcrXoZwPqiIwCY0agb9E3nOE7kwtr9pKj7BsRxHB0cHPRdCOO+P2Ca5tTDRIc9MYv7NC2uZZeHcXnk+74ajUak4d37d69BHU2TRBJPp9MTNcAajcbEc/ODeZDB7zBNU/V6XYeHh33lrFQqRW7Qeo17PbDq5THuE/BZj++kxh2rYXkaPO2Pc6ynKeejjmetVpNhGAOPTaPRiIxeCLiuG+mIsCxLV1dXajQaymQyfeVyUYbVv70mKZPdozZ6t1er1bDspFKpvjyPs4/UuVEZNZVjnuU2bh6NMy4Pfd/X2dlZZKk5y7LCTq5B5WHV65llX/NWrf7qPobzLFerXAawvggWCGyAXC4X619v8KpZBEMau+efBUO4e4N2SR8vW+P7fnghjfP+bslkcurhtMEQ6UHz5TZp3tu4POoNatQ9r3eSwEGL1Gq1wmB3xWJRtm1L+jhI2yDDnooOe1o06gnquNcDq14euztUVim407hjFcQF6M0b3/d1dHQU61hPU87HHc9B9VKwXFfvDWvQoRE03g8ODsLfk0wmlcvl5jZMeFBeBYbVv8M+J26Z7I3fEXxP77ljGEZffsfZJ0hrnA65WU2SR+OMy0PP81QsFvumCUj9nSqBVa9ntk3ca808yxVlAItCRwCwAUqlUqx/cXqlhw0XCxowAcMwlM/nB0b6Di6SjUZDjUZDyWQyfJJXLpfDeabj3j+tYb/h9PS0L/r1IoaBzksQITyXy83tJr17OLPUGT4d3KysSlR5y7KUz+fDf0EDK5/Ph09besujZVlqNBp98QTq9XrfU7Zxa0xPuhb5OMsqj8NG1ixK3LojzrEqFAqRYeGO48iyLCWTyVjvn3c5D27ien9XcEPc+9tLpVLYcPd9X+l0OlKm5hkpflhHwKj6N0h/93GRRpdJx3HCESbVajX8HN/3w2PRbDa1v78fvj+RSPQdpzj7BOlb9JzqafIoMM15nUwm+657QdmctTxw3ZvNPOuvceVq3HfO+zesWxnA7dlpL3tMzwxubm60u7ur6+tr3bt3b+A+z54901tvvaWXX35Zd+/eveUUbodtyuNHjx7pzTffHLlPMMSvN3DUqvM8L5zL3Wg0lM/n9eDBg/DCVi6XVSgUInPUgnmzgadPn4ZDYX3f1/379wc28IJqZ9T7F/EbpM5T5eC4PH78eGGrFASmLQ+NRkOe54XDnLsDpc0ql8vp4OAgfPJ6dnYWia4e9zNuIzZFcDwdx1E+nw+HEQ8rj2dnZ9rf3w/nlGez2b6biYODA1UqlaE3yeNej2vZ5bH7sycdgj7J8Z227hh3rMrlclh/9NYNcd4/j3LezXXdsHEfDKsNykjwWnBT3puW7hEZrVZrZGCxaaRSqUjAvjj176DjIg0vk41GI1ymsDtOg/Rx+bJtW/v7++HyeLlcri9IbZx9JCmTyej8/HyifJqk3E6bR7Oe173XvWC1nGnLw7LrmV6ztIHmdd1bZv0Vp1zFOWaTWKUyEKedjBXSXmPX19dtSe3r6+uh+3zwwQftb3/72+0PPvjgFlO2XbYpj7/0pS+NfL1arbYrlUr76uqqXa1W29ls9pZShlU0S3no3d80zXa9Xl9EMqdC2d5sHN/1UigU2tVqddnJaJdKpXahUAj/TqfTfemKs0+wfVKU2+WatQ00r+se5WB5xrWTsVqYGgDMie/7qlar4dB2y7KUSqXC+c3YLrOWB8uyIk80Wq3WrQ7zHodyvdk4vusln88vbfWYbpZl6fHjx+HfwVKO0sdz4UftEygWi7GXxe1GuV2eebSB5nXdoxwA8dARAMyJ67p989mCIczYPvMsD7lcTufn5/NK2lys07QXTI7ju35OTk6WHhTSNM0wHeVyWaenp
+Frh4eH4ZSKYftInRvKp0+fTjVnnnK7PPNuA81y3aMcAPEQIwAz26Y8nmbu087Ojq6urhYe9AjrYdLyEDTsb2OZMQDrLVitYJ1vhIrFYhg/AOtvmjYQ1731RYyA9fJLy04AsEm6o8K2Wq3wiUar1QovgsEFrlqtyrZtua6rZrOpXC631o039BtXHsaVhWCZrSBK8bB1zAFA2owbJzoB1tc82kBc94DbQ0cAMCepVEqFQiEyn617XWypc5EMokUHF8FKpaJUKhVZ5xrrb1x5GFcWpE7U7IDv+1rjAVwAgA02jzaQxHUPuE10BABzYNu2kslkX1CbWq0WmeeYSCTCXnHP88KLZPeyT0FveavVCpdpwnqJUx7ilIV5LRcIAMCizLMNxHUPuD10BABzUCwWVa/X+7Z7nhcJhNQ9bLNWq6lSqfTtX61Ww6i5wZrpWC9xysO4sgAAwDqYVxsIwO2iIwCYURAlt7cnvNFoyPf9gXM2Pc+TpL7gOcHcuIBhGHJdl86ANTJpeRhWFiRGhwAAVts820AS1z3gNrFqAGa2TXn86NGjsMwFfvGLX+j//b//1ze//yc/+YleeOGFgRc63/f1i1/8Qp/+9KfDz3jhhRf0D//wD7pz504YbOfv//7v9cILL+iTn/zk4n4U5mrS8jCsLDx79kytVkv/4l/8C0nSj370I7300ku38hsAAIhjnm0grnvr791339VnPvOZZScDA+zu7ur111+PbGNEADCh6+trlkYBAAAAsBYePXrUt22pHQGZTEYnJycyTbOvx5Do6ejz4YfSd78r/fSn0qc+JX3+89KdO8tOFQAAAACslaV2BDQajXAuULd0Ok0AEUS98Yb0la9IP/7xx9tefFH6+telV15ZXroAAAAAYM08t8wvz+VyarfbkX+lUolOAES98YaUTkc7ASTp3Xc72994YznpAgAAANac4zgql8vK5XJhAEhsvqV2BPRGEnVdV0dHR0tKDVbShx92RgIMimkZbPuDP+jst8J835fjOGGkXAAAgFnRvsAkBpWXRqMhScpmsyoUCspkMstKHm7ZUqcGdMcB8DxPnueNXCbk/fff1/vvvx/+fXNzI6kTtf7Zs2cD3xNsH/Y6ZrfQPP7e96SnT6Vf/uXh+/zsZ9Lf/I30m785/+/v0W63tbOzM9F7XNeV7/uyLEu1Wk2FQkGlUmlBKQQAANuA9gUmMay8tFotVatVpdNpGYahRCKhRqPRtyQkNs/KLB+Yy+XGVl5/9Ed/pK997Wt92//0T/9UL7zwwqKSBoS+/vWv65/9s38We9UA3/d1dnamQqEQbnMcR48fP45sAwAAiIv2BSYxSXnZ29vT1dXVbScRC/bo0aO++5eV6AhoNBoqlUpjOwIGjQh46aWX9LOf/Uz37t0b+J5nz56pWq0qlUpt/Br3y7LQPP7e96Tf+Z3x+/3FX9zKiIBXXnlFOzs7sTsCHMfR2dmZ6vV6uM33fd2/f59KFgAATIX2BSYRt7zkcjmlUqm+6dtYf4M6ApY6NSBQKpV0cHAwdr/nn39ezz//fN/2u3fvjr0BjbMPZrOQPP7CF6T9/U5gwEF9Vjs7ndUDvvCFW1lKcNJpAel0uq8yNQxDvu/L9/2+ZTMBAADGoX2BScQpL47j0AmwZVaiI8B1XR0eHi47GVhFd+50lghMpzs3/d2dAcFN+Z/8ya10AkyrXC4rkUhIklqtVhgHo9VqqVaryfd9tVotZbNZSZ0e2ocPH0Z6bQEAALqNal8YhhHOCaeNAWl8e9QwDFmWpUajIcMwwlhulKPNtRIdAZ7nRQIHAhGvvCI5Tmf1gO4lBF98sdMJ8MorS0vaOKlUSoVCIRJwJZfLhf+fSCRkmqZSqVRYubquG1bUAAAAvUa1L0zTlOd5tDEQGtce7V4pwPd9BTPHKUebbanLBwZM06QwYbRXXpF++EPpr/5K+tM/7fz3yZNb7wTY3d3Vu+++G2tf27aVTCb7oq7WajVZliXP85RMJuU4TmSfIN4CAABAr3HtC0m0MRAaV15M09TV1VX4rzt8HOVos63EiIBms7nsJGAd3Lkj/dZvLTUJr7/+uh49ehRr32KxOHDIlOd5Oj09DS/WFxcXkYittVqNNVwBAMBA49oXkmhjIBSnvAxDOdpsK7FqwLRubm60u7ur6+vrkasGvPXWW3r55ZcJFrgg25bHg6Ju9nJdV6lUSr2nV6PR0OHhYbjd933t7e1F9tvZ2el7H1aL7/sql8uSpHw+v+TUzCbOb3EcR1JnHqFpmmHDAMDq2qR6atNNcqziti+Cz6WNsdnGlZ1Jysuo76AcrbY4dcig+5eVmBoAbKJBcS/Ozs4iy2T2xsdoNBqR4CxYTa7r6unTp8tOxlyM+y2e56larSqdTiubzbI+NbAmNqme2nSTHqs47QuJNsY2iFN24paXYShHq2/a+p6OAGABLMtSq9WKbAueqgaBVqTO0i3dS/xcXFyEc7A8z1t8QjGVdDoda8nTdTDut7iuGymjQSTqYXzfn2Pqbt+6px8IbFI9tekmOVZx2xcSbYxtMK7sTFJehqEcrb5p6/uViBEAbKJ6vS7btrW/vy+pU5FWKpXIPqZp6ujoSMViUaZp6uTkRGdnZyqXy7EraGCRms1mWIalzkoXw26Wbdte+xED5XJZ6XSalWywFVi1aT3FaV9ItDHQEbe8DEM52lx0BAALYppmrJui3qFZk1TO85bJZHRyciLTNCO9v9LgoWUB3/d1eXmpSqWiarXa93q5XJbv+zIMQ81mU6enp+HnB++VOjednufp/Py87/snMS49xWIx8v3Mn51M79MFqXOMu5cikjr5LH0cEDbOMMRlH5t8Pq9cLhd7yOQsxpXTXq7rqlQqKZVKyTRNVatVPXjwQOl0euD+qVQq8rmLONeWYdp6Sup0VgVPTRKJRF/ejXp9VD02jVWopw4ODhY+z3fSeiBOOQ8+U5KePn0audbOUj6GWYVj1S1u+0JarTbGIow7p3uNO1bTXLdGWYWyM0l5GWaZ5WjSYzKuDhhXx2xTu5SOAAChRqMRDhnrlk6nh1b6jUZDtVpNvu8PvEEsFovKZrORCu7VV18NP8+2bdm2HTbQcrmcMplMrBujadMjKbIWbveNX7FYHDjPan9/fys7DA4ODiIjAIKAgd08z1O9Xo88GegdHZDL5fpuTHuNOza3JZPJqFgsTny8J3m6Oq6cDuL7vlzXleM4Mk1Ttm0PbfQ6jtM3hWPe59owi37KPE095fu+Hj58qHfeeUeGYQwM3Drq9XH12DS/Ydn1VPc830lMcnynqQfGlfNMJhNZz7xcLke+Z5ryMcoqHKttMUnZGnfODjLuWE1TXkeh7PSb9PowzTEZVweMq2O2ql3aXmPX19dtSe3r6+uh+3zwwQftb3/72+0PPvjgFlO2XbYtj7/0pS8tOwkLUygU+raVSqVY761UKu1kMtm33bKskdssy4p8b6FQaBuGEes7p0mPYRjtq6uryLZpqsJSqTQwv9bRoN8S5FGz2Wyn0+lw+6A8zefz7WazGXmvZVmRfK7X621Jkf16
zevYzMOg3zlONpud+D3DyumwfXvzZ5Crq6t2qVTqy7tFnWu9psmHSUxTT2Wz2b73VavV2K+Pq8emtcx6qlAoTHWs4r5n2npgVDlvNpttSZHXr66uIttmuY6NwjVl8SYpj+PO2UFGHatpy2sclJ2PTXKMpz0m4+qAcdfSTW2XDrp/IVgggFDv00XXdXV0dDTTZxqGoVQqFT5V7u0NrlarkR7Nx48fL2x5Os/zwqG9vSaJfOu6rqrVqqrVal+vc9DLHCzjInWeXBweHk6d7kCxWAyf9PZ+b7FYVLlcDv91830/8t7u3zrstxweHsr3/XA+YPCbBq057LpuXw9/rVaLBBEKXh8WXyDusXEcR47jKJfLyfO88GngvAMWmaapRqMx18+8LZeXlzo+Pu7bPu5cW2TZnadp6qkg9oPneWF56v7t414fV4/N06LrKdd1VSwWdXZ2JqlTdywq4Nek9cA4wWf1BjANvktazHVsVHoWfU1ZB8uoO8ads73iHKt5l9dRKDvxTHNMZq0DNq1dOgpTAwCEuhu2nufJ87yZK7/z83MdHh5qb29P+XxeBwcHQ4d6O44j3/cXNvdsWGPXMIyJLvSWZQ3MF8/zlEgkZJpmZOiq67pKJBJTpVn6eAhkpVIJ57ednZ2FF7tUKqVSqRQev8PDQx0dHYVRfbuHuXmeJ9u2w/QP+y3BXDyp/6I66Dd3MwxDV1dXkW3BBW3YzVOcY1Mul3V8fCzDMFStVmXbtiqVilKp1NTDnIdJpVJyXTfMw1VxeXmpRCKhVqulZrPZN+/Tdd1Y52zvubaosrsIk9ZTQdkKyohpmuFQT8uyxr4uTVaPzWrR9VSwPRhyu6gYEdPUA4Fh5bz7JqA33UG+LeI6Nsyij9U6WEbdEeecHfaeXsGxmqW8ToOyM960xyROHTDuWhpY93bpOHQEABuiN1DbMIeHh7GivBYKhbk0dA3DkG3bqlarKhaLsiwrvJkLBEFUfN9XJpO59eBlwcVgVsHFplgsRm4gq9WqUqnU1J9r23YY+EaSjo6OIjEWkslk5MJ3dHQUuYmt1Wpho980zble0IJRA+MEaxZPemy7j00ikQjf73leWObnPcc9+K7uzpBVEBzPIL/L5bIymUzkeAbHY1gDYti5tqiyu2hx6qnup8jBbysUCrp//76urq7Gvh68Nq4eW7R51VPSxzdQt13XxqkHRpVz0zRlWZZc1w07KEc9NZvXdWxS8zxWq24ZdUecczauUcdq2uvWLLap7Exj0mMyqA6Iey3dhHbpOHQEAHOws7NzK9/THhEEZ56NnXkOibZtW6lUSpVKRZ7nKZPJ6PDwMHKTZRhGJPDT3t6enjx5MrDidRxHtm3P9SZtXpVt0Bt7cXER6V2u1WrKZDJD3zfqN/m+r3K53JdfQd4Ui0XV6/XIezzPi6wnWygUlMlkdHBwoGQy2bf/LDzPG3uBDDoypllmqPvYdI9MqNVqAzs0giFxQVDDQT3kcfYxTVMXFxdD0zWo4y0I8NMtlUqNjWIdV2+Hy/HxsXK5XPg0K85STsPOtWnL7qz54DjOyHwOnJ6e9o3OmLSe6h4aGjxt6b6JHPZ68PR8XD3W+7tWtZ6SOsc1zmiXeZbzuPXAuHIejAZqtVrhk+hB74tbPlb9WEnLbWOMy59l1R3S6HM2rmHHKk55pex8bFDZmfd1ctK2xLA6YFwdI8Vvly6iDEjzLwdDTRyJYIUQLHA1bFseb3KwwMCgIDzjDAqC0mw2BwaGSSaTYbCWfD4fCZISBIOqVCoDv6der0eC102ankHVnqSxQYbiCgJX9X7+KKN+U7VaHRqkpl6vD3xNUrterw/c3zTNoXk7jUql0s7n8yNfjxOoa5Jj02w2B/7u3vI2KJhbnH3a7U6+TxpI7TaCBfYKjnXwL9BbDuOca9OU3UEWHSyw+3vi1FODAsy1250ATaVSKdbro+qxQVa9nkqn01MH0Ju2nE8SeLbXsDqt3e4PFtidzjjlY9WP1bLFyZ/brjvGnbOj3tNr0LGKW14pO5Ob9vowSR3S/V2D6oBRdcyk7dK4ZSD43mWWA4IFAhssl8vF+tcbSG4Q13XnMgxq2NPioDfY8zwVi8VIz2cwpHnY9yeTyamHtgdDYQfNyZrXHLveIGLdc9eHDWEd95sGDb0PlqHpnYPpOI6SyWT4tO/g4CD8vclkUrlcbq5D3IK4A4MEvzfoVfd9f+i+kxyb3ic+wff0llvDMPryPM4+QVq7R1UsWzA8sTt/gnPFNE21Wq0wCFyxWJRt25I+DjAZ51ybpuwuU9x6KphD3Fu2fN/X0dHR2NfH1WODrHo91Wg0wqepiw4wNkk9MK6cS/1P+YJpAr3HKG75WPVjtWxx8ue2645x5+yw98Q5VpOUV8rO7ZjkmPS+r7cOGFfHTNounaUMBN+5zHJARwCwIUqlUqx/cYZUDYuIHVSQgwwaxmRZlhqNRt985Xq9rnQ6rWQyqXw+H/mui4sLJZPJmSvAYcOqTk9P+6LQTzNkfZjuYfvSx79HGh4UZpTuYGaBIEqyZVl9v7NUKoUXJd/3lU6nI/k77+i3wzoCGo2GGo2GkslkGKynXC6HHReDytKoY+M4TjjftFqthp/j+35YvprNpvb398P3JxKJvrIXZ58gfbcxJ3BYOe3NH8Mw+s6VIGq2YRiyLEv5fD78F9yk5vP52OfavMvuok1STxUKhcgUBMdxZFlWZH7xsNfH1WOzWFY95XnerQTCnLQeGFfOJSmTyUTyplQqDQz0Ne+VHZZ1rNbBMuqOcef0pNcYaXx5ndamlJ1gZYhcLndrncPTtCUCg+qAcXXMJrZLR9n5p+EHa+nm5ka7u7u6vr7WvXv3Bu7z7NkzvfXWW3r55Zd19+7dW07hdti2PH706JHefPPNkfsE89R6g7iti4ODA1Uqlb6GYrlcVqFQiMyF8jwvnOfbaDSUz+f14MGDsIHs+77Ozs60v78fzuHLZrNhoyGYAx8IordOexM2Lj1S5ylpcFweP348NFrstHK5nA4ODsKnFmdnZ5FoypNyXTe8EAbB4IJjE7wW3JB3560UfdrXarUWEuAslUpFAvb5vq/79+8PvMEOLjmDypI0/Ng0Go1wmcLuOdvSx7EDbNvW/v5+uOxPLpfrC44ZZx+pc7Nxfn4+UV7lcrnYsTrGldNB+dN7rjx9+nRg2Q0+13Ec5fN5pVIpWZYV61ybR9mdJB9mMUk9FWwPyuSgvBv1+rh6bFLLrqe6P3vSzoy4x3faemBcOQ/qPMMw1Gw2lcvlBl5nh5WPSS37WA2yim2MZdQdo87ZSa8xccrrpDap7DQaDXmep3Q6HebVpIEZpcmO8SxtCWl4HTCujtnEdqk05P5lrpMPbhkxAlbDtuXxuBgB1Wo1nP8+zTxjYN0UCoWVmNNYKpUi8wHT6XRfuuLsE2yfFOd6B/mw2Ti+y7XJbYxN+i2raJay07u/aZpD43aMwjFeHmIEAAvm+76q1WpkuG4qlQrn6wKbKJ/PL2WJrl6WZenx48fh391rBwdPFEbtEygWi7GX4+zGed5BPmw2ju/ybHo
bY1N+xyqatexYlhW5zrdaralG23CMVwsdAcAcua7bN2/KsqxYAfqAdXZycrLwoGPjmKYZpqNcLuv09DR87fDwMJxWMWwfqdNYevr06VRzAVdliO6ykQ+bjeO7PJvexqBsLc48y04ul9P5+flU6eAYrxZiBGBm25bHcWIE9NrZ2dHV1dWtBB8DliVYsWCdL/TFYjGMHwAA64A2BqY1adkJOvxnDZSK2zfo/uWXlpQWYGN1RzNttVrhk8VWqxVWtEFFWq1WZdu2XNcdGfQIWAeb0DCgEwDAKhvXxqB9gWFmLTvBcnzBSiqGYVCm1hwdAcAcpVIpFQqFyLypYK5xUFmWy+UwcntQ2VYqFaVSqcjauwAAAIFxbQzaFxhm1rIjdVbTCfi+P/VqClgddAQAc2LbtpLJZF/wlFqtFplvnEgkwpEBnueFFXH38mtBr2yr1QqXSwMAANspThuD9gUGmVfZmWa5QKy2iTsCbm5uVKvVdHR0NHBe/l/+5V/qt3/7t+eSOGCdFItF1ev1vu2e50UCknUPn67VaqpUKn37V6vVMDprsAY4AADYTnHaGLQvMMg8yg4200SrBpycnGhvb0+WZWlvb09/+Id/GHn9+vpaqVRqrgkEVs1Pf/rTvm1BJNbe3tZGoyHf9wfOnfY8T5L6ArQEc7AChmH0RXoFAADbYdI2Bu0LBOZVdqTOaJJgxR3KzWaI3RHw1a9+VfV6XW+//baurq70ne98R7Vara8zgPki2HQfffTRwO2D5t6dnZ0NXV/ddd1IT3xQqTabTe3v74fbE4lEuAY6AADYPpO0MWhfoNs8yk4wmiSdTiubzapQKCwuwbg1sTsCvvWtb6lcLuvhw4fa3d2VZVl6++239X//7/+NDHve2dlZSEIBSdKHH0p//dfSN7/Z+e+HHy47RZI6a7G2Wq3ItmAeXjabjWwLRs1Uq9Uweqvv+yMvxr2fDQAAtkOcNgbtCwwyr7LDaJLNFDtGwNOnT3V0dNS3/fLyUsfHx/pf/+t/bcTSUVhhb7whfeUr0o9//PG2F1+Uvv516ZVXlpeuf1Kv12XbdtjbbhhG3xwr0zSVSqXkOI7Oz89l23bfmqwHBweRi3YQ0AcAAGyncW0M2hcYZh5lh9Ekmyl2R4BlWbq8vNSXv/zlvteCzgCiSWJh3nhDSqel3qkn777b2e44S+8MME1z7FCp3qitg4ZlWZYl27bDvz3PI5gPAABbbFwbg/YFhplH2RmE0STrL/bUgPPzc7399tv6N//m3+iHP/xh3+uXl5f63//7f88zbUDHhx92RgIMij8RbPuDP1iZaQKzMk1TJycnYUCW7qk3AAAA06B9gWkdHBxE/mY0yWaIPSJgd3dXl5eXevLkiT772c8O3KdSqejJkyfzShvQ8d3vRqcD9Gq3pR/9qLPfb/3WrSVrkZhmAwAA5o32BabBaJLNFLsjIHD//v2ZXgcmNmC5vpn2AwAAABBL92iSVqvFaJINMXFHAHDrPvWp+e4HAAAAIDZGk2ye2DECgKX5/Oc7qwMMW5pyZ0d66aXOfgAAAACAkegIwOq7c6ezRKDU3xkQ/P0nf9LZDwAAAAAwEh0BWA+vvNJZIvAzn4luf/HFlVg6EAAAAADWBTECsD5eeUX63d/trA7w0592YgJ8/vOMBAAAAACACdARgPVy587Slwh8//339ejRo77t//AP/yBJ+pVf+ZXI9r/7u7/Tc889p5deemnoZ/7iF7/QCy+8oFarpX/8x3/Upz/9aUnS3//93+vu3btKJBJz/AW4bXHKBmUAAAAA8/Tuu+/qM5/5jHZ3d/tem7oj4Pvf/77+7M/+TH/7t3+r73znO5KkP/7jP5ZlWfr1X//1qRMLrDrTNPXmm2/2bd/Z2VG9XlcymYxs39vb0/n5eaxoq4eHhyoUCuHarIeHh/qv//W/slbrmpukbFAGAAAAMA+PHj0aeN8iTdkRcH5+rq9+9at67bXXdH5+Hm6/f/++bNsOOwaAbeG6riT13eg1Gg35vh+rE8D3fTUajcgNX+/fWA2+76tcLkuS8vn8yH0nKRuUAWyLYC3qer2uTCazlmU8Tj3gOI4kqdVqyTTNtfydwDbjPMcqm6Q9OshUwQKLxaLq9bpeffXVyPbf+73fU61Wm+YjgbVnmmbftrOzM5VKpVjv9zwv8hmNRiP8O7iZxGpwXVdPnz6NvX/cskEZwDZoNBqSpGw2q0KhoEwms+QUTWdcPeB5nqrVqtLpdPhbAawXznOssknbo72m6gh4+vSp9vf3+7Y/efJE7XZ76sQA68qyLLVarci2oIc4m83G+gzDMGQYRvj3xcVF+BTZ87z5JBRzkU6ndXBwEGvfScoGZQDboNVqqVqtSuqU+UQiEXYOrJNx9YDrupHz2TCMgR16vu8vIHW3axN+AzDIvM5zaf3Pk3VP/yaapD06yFQdAZlMRplMRjc3N+G2m5sb5XK52Dc9wKap1+uybVvFYlHFYlGtVkuVSiX2+03T1NHRkYrFohzH0cnJiSSpXC5zXq25uGWDMoBtYFlWZDRMq9Xqmzoj3V7nl+/7C2ngNpvNyEOTRCLR9z22bUduItZVuVze+s7Kbf/92yrOeS5txrnOeb55pooRUCqVlMlkwgL94MEDNRoNZbNZvfbaa/NMH7A2TNOceUhY71DxSToS5qVcLsv3fRmGoWazqdPT05EXL9/3dXl5KalzQfQ8T+fn50Pfk0qlwqeB3WzbDns1E4lErLgK49JUqVQGflexWAzT5/v+VPOqJjFJ2ViFMjCtYrEoqVMOpP7fMsy4Yz/sddd1VSqVlEqlZJqmqtWqHjx4MFPZmYdx5W/Y/tLwc2hcmZ30O2f9DfM6h3K5XCTWULeDg4NbGWX46quv6uTk5FbKTffooHK5rFwuF3l9mnPotuuzQfL5vHK5XOxzfhbTlPVReZTJZHRyciLTNPuuW6Zpjn09cFvldVKLqI+k0fX2vOujddM7CnBTzvXbPM/ncX0f1N4M8l3qjHDvbpstok2xau3RXlOvGlCpVPTkyZNwOF8ymdT9+/fnljAAt69YLCqbzUYqpVdffXXkzaht27JtO2wQ5XI5ZTKZgRWe4zh9Q+Z839fDhw/1zjvvyDAMNRoNHR4eTt2gajQaqtVq8n2/72Ic/Ebp42H5rutGLmzFYnHgfKv9/f2lNLDnoTf2wCLYth25oOZyuaGdPoFxxz7O667rynEcmaYp27YXcjM3Sf6NK3+DjDuHxpXZab5zlt8wr3PIcRylUqmBx6w7PsaiLaqz7eDgIPJkMAgkJnXKVL1ej4z0meYcGncsblMmk1GxWJy4nlz0+RXn/Amma3VLp9OqVCpjXw8+Y9Lyehv18iLqo3H18rzro1U36jyXNu9cv43zXJr9+j6ovZnJZJRKpcI8LJfLkWMx7zbFWrRH21PY2dlpn5yctN94441p3j4319fXbUnt6+vroft88MEH7W9/+9vtDz744BZTtl22LY+/9KUvLTsJC2NZVqxtva8XCoXw70Kh0DYMo2+/q6urdqlUavdWO9lsNvL+drvdrlarkyR7oE
ql0k4mk33bDcNoX11dRbZNUxWWSqW+dK+qbDa70M+/urpqW5YVydd6vd6W1G42myPTNerYj3u9Uqn0HctFmCb/hpW/QcadQ3HL7CTfGcciz6FqtRoey3q93ldOCoXCwsvtPAyqB4K8aTab7XQ6HW7vzst8Ph/5zdOeQ/Oqz+ZlmvK36PNrXB4NqsdLpVLs14N9Jv0dt1m+51kfxb1mz7s+WqZpz/N2ezPP9ds4z2e5vg9qbzabzbakyGdeXV1Fti2qTbHs9uio+5apYgTUajUZhqF//+//ve7cuaOTkxP95V/+5cydEgCWyzAMpVKpsHc7Tg9utVqN9Ew+fvx44NI5l5eXOj4+7tteLpeVTqfleV7Ye7uopXc8zwunPfSaJCq/67qqVquqVqsDnxRto1qtFpk7GJSbUXOvxx37WctG0LMfLK0TpOfw8DD2Z9yGUefQvMrsvMwjPZ7nhbGG9vb2dHh4GFkdo1gs6uzsTFLnicgsc1J93w9jbriuG0njoPIhffyUKCh3juPItu2+/YbVA4eHh/J9X6Zp6uTkJPyO09PTyHt769ZJz6G4x8JxHDmOo1wuJ8/zIr9v3kzTXKnAj3HyqPeJn+u6Ojo6Cv8e9fq8y+sqGHdNX+V6uftc7702F4tFlcvl8F+3cfXEtOd58P5NO9dX7TzvNai9GeRBb2BHSUtZ8W5V2qNTTQ1IJpP6xje+oW984xtqNBq6uLhQNpvVkydPlMvl9N//+3+f5mMBLNn5+bkODw+1t7enfD6vg4ODiYadOY4j3/f7htq6rjuwoRBUzMGwStM0w2GIi+gMGHYxNAxjomBhlmWxTnAXwzB0dXUV2RZcyIZ1JI079nHLxuXlpRKJhFqtlprNZjjEz/M8JRIJmaYZGQrouq4SicR8M2COes+heZXZeZlHekzT7CsvgeDcCoZrzhpcq3uYs+d5sm07LF+Dykej0ZBlWfJ9X5lMRpVKRaZpKp1Oa29vLzK8d1g9EMz5lfpvIqWPy2a3Wc6hXt3Holwu6/j4WIZhqFqtyrZtVSoVpVKphUy/SKVScl13YPDHZYiTR73DuD3PixzXUa/Pu7yummH10bTX7EXVy8F0heB8dV1XZ2dn4fmXSqVUKpXCY3l4eKijo6OwnA6rJ6Tpz/Pu39ttE8712zrPh13fRxnW3uzubOk9T7vzd5rvnMaqtEenjhEQSCaTSiaT+uIXv6hCoaBSqURHALAEvYFohjk8PBwagd4wDNm2rWq1qmKxKMuywgvLKEEwlKDx3Lt/0GveW7l199AGF5RCoaD79+8PvVFYhKDSx/ycnZ2pVCoNLTvjjn2cshFsDy7w5XI5vHkLGuvFYjHSWKlWq0qlUvP/wTMadw71WrUyO8/0BI3WedxU1Wq18CbNNM3IDY1lWSqXy5HyEaxg0Gw2ZVlWrJEtkwrqw3HGnUPDdB+LRCIRvt/zvPA6sagAbolEInKDtKqGldegHTvMoNfnWV5XxbD6aNZr9qLqZdu2w4COknR0dBSe67ZtK5lMRs65o6OjyI3ssHpiVpt6rt/GeT7q+j7KsPamaZqyLEuu60YCDs/jO+fptq/tM3UEvPHGG7q4uJDjODIMQ9lsdmE9J8Aq29nZuZXvaY8IoDePgDG2bSuVSoU3UplMRoeHh2Mr/OD8lzoV597enp48eSLDMGItfdc9DDPoDR3WqxsM053nRWjele4yy8OgDqEgAE23YUHapE4eX1xcjP3+09PTgU8EgkZZnCUPhx37ca9336QFjo+Plcvl5Pt+WHYuLi4i16VaraZMJjM0PfPIv2kMO4eGmaXMrvo5VKvVYj1pivM7CoWCMpmMDg4OlEwmVa/XJX08lLlUKkXKR7Dddd1I4693uPgsPM8b2+Cf5Bzq1X0sustorVYb2KANhpMGQc6G1bvj9pE6DehRdceyzq9eg8rruKHOw16PU15XoV6exLj6aJJrdrdp6+VR57rv+yqXy5HXDMMIz7FisRie9wHP8yJrrw+rJ2a1qef6bZzno67vw/J0XHszGCnRarXCkSnd3zXJdy7iOirNvz061sQRCdrtdiaTaT/33HPtvb29di6XazcajWk+ZmYEC1wN25bHmxossNlsDgzmkkwm25VKZeB7rq6u2vl8PhLsJAjIUqlU2vV6vV2v1yP7a0zwlna7E0ClNxhToF6vRwLzDDMoOEvwfb0kzSVA4aq67aBUw45dt3HHPk7ZGFQuJYVlrre8Ba9PapHBzMadQ5OU2bjfuernUDqdjlWG4v6OYF/TNCNlptlsDg1s2vsb46Ypjkql0s7n8yNfn+Qc6jXoWAz7rb31/qDgsHH2CVSr1VsJmhe3rE+SR4OC4MV5fdqysYrBAuPWR3Gu2cO+c5p6edS5Xq1WB5bt4H2DXuu+TvTu31tPzGJTz/XbOM/HXd97jWtvDjIoWGDc75zk+rPs9ujcgwUmEgm9/fbbarVa+sY3vqHf+I3fmOZjAMxRLpeL9a83SE5gWM/1qCkHnuepWCxGejCD4ViGYajVaoXBlIrFomzblvRxQJ9gjmHvXCnf94c+fUsmk1MP0wqGbw6am8Wc/9kFT/KDHnnf94fOgxt37Me9HgxZ7X69d85vb7DL7nmSywi0N8i4c2gRZXbVz6FGoxGe/6OCH437HQcHB2E6k8mkcrlcpI5zHCcMKNVdHmq1Wt9vcV1Xx8fHajQaM08TGFSuu79Hin8OxT0WvU9rg+9xXbcveFbvuRFnn4Dv+5Enrcs2aR6Neno77PW45XUdxKmPJr1mD/qOSevlcef6oOH3/j8t2dY7R99xnHBaszS+npjFpp7riz7P41zfe41rb0r9o3qCaQLBqJZJvnOW62jwmavQHp2qI+Ab3/iGHj58OO+0APgnQfTZXC4X+4alVCrF+jds2JRlWQMbufV6PRy6FTQSAslkUvl8PlJJXlxcKJlMhgFM8vl8+C/oVMjn8+FnFgqFyBAzx3FkWdbMQxuHDa86PT3ti7I7zZA8RDUaDTUaDSWTyTCgVrlcDhthvWVHGn/sR71uGEZf2QuiWQcNme7hodLHZTNIzyINK3+TnkNS/DI77yGFyzqHPM+b+fz3fV/pdDqSr73Rzx8/fhyuktJ9s1CtViPfHwT8MgwjXDVpFsNuDqY5h0YdC8dxwnnX1Wo1/Bzf98N6vtlsan9/P3x/IpHouwbE2ScQZyj0PMQ9v6T45XXcKjnDXp9HeV20edZHca/Zw75z3vVyd3DZ7t8V3BD3pqNUKoU3cHHqiVls6rm+6PM8zvW9N4/itDczmUwkD7unhsX5zmmtcns0VoyA//Af/oMymYx++7d/W5L6lsboFSyjAmyrYL5cb4CaOIIey2w2K9/3bzVwXqVS0dnZmfb398Me0u55fK7rqlQqRZYWOj09jVTGvu/rnXfe6fvs7vmNQSwCy7KUTqfVarXCz3j69OlMwW08zwu/q9FoyLZtPXjwILwQ5PP5SA/x48eP5xJfYZRZysM6CCI2+74f9sIHgrIyqOyMO/bjXu8te0+fPo300JumqaOjIxWLxXCJp7Ozs1hxK6Y1rvxNc
w6NK7PjvnPev2HR51ChUAg/e9rfYBiGHjx4EJnven5+Htnn5ORErVarbwSA7/uRkVDdQabmcYNgGEbfU8ppz6FRxyKIzO44js7Pz2Xbdqx8jdOhNGyfx48f9+XzPE1zfsUtr6ZpjoxcP+z1eZTXRVlEfTSuXh73nYuolyuVShiAMAgWF3xf8FpwU16pVCKdxePqiVls6rm+6PNcGn99H5RHgWHtzVKppEajIc/z1Gw2IytJxPnOSS37WhpLnLkFh4eH7W9961vh36lUaui/L37xi7NPZoiJGAGrYdvyeFyMgGq12q5UKu2rq6up5lH1vsc0zaFzorD6Zi0P87CM79wk5B/mrVAorERcklKpFJn3nk6n+9IVZ5/u1ya1refXtv7ubbOJ5zrn+XoZdd8yVbDAVUFHwGrYtjwedUIFgXa6jQsWM86wIDhYfYsoD9NoNpu3+n2bhvzDIkzTmJ63ZrMZSUd3QKsggNaofbpNe8OzrefXtv7ubbRJ5zrn+fqZe7DAm5ubgdt/+MMf6oc//OH0wxOANee6bt+c/mCt6mnkcrmFD7/C4sy7PExrE6cj3CbyD4twcnKy9OBywdDsIC5N99TPw8PDcJj1sH0Cvu/r6dOnU02d2Nbza1t/9zbalHOd83zz7LTbIxYmH+LOnTv68MMP+7a/8847KhaL+s53vjOXxI1zc3Oj3d1dXV9f6969ewP3efbsmd566y29/PLLunv37q2ka9tsWx4/evRIb7755kTv2dnZ0dXV1UQBR1Z1ziFmN015ALB5ggjm695ILhaLA+fqAujYhHOd83w9jbpviRUssNewvoOjoyPVarVpPhLYGN2RX1utVthz2mq1whu/4Ca/Wq3Ktm25rqtms6lcLifTNMMlXIJI/sGyPVg/48rDuLIAYHNtSkcvNwfAaJtwrnOeb56JOgJ+7dd+TTs7O9rZ2dHnPve5vtfXYQkVYJFSqZQKhULkPAiiTwc3deVyWcfHxzIMI7z5q1QqSqVS4YoBmUwmfL/v+0M737DaxpWHcWWBjgAAAAAswkQdAaVSSe12W1/84hf12muv9b1umqZ+4zd+Y26JA9aJbdtKJpN9nWG9S1MF61FLnc6z4Mawe/md21ouEIsTpzzEKQvdyxoFy5gBAAAAs5ioI+Dhw4eSOsNbfu/3fm8hCQLWVbFYVL1e79vueV4k4Er38LBarTbTGqVYXXHKw7iy4HmeqtVquK5ssBYuAAAAMIupVg3I5XJ64403+rafnp7q+9///qxpAtZOEBm+9+lvo9GQ7/sD54Z5nidJAwPGOY4TRm3tjTqP1TdpeRhWFoJYEQHDMCgPAAAAmNlUHQFf/epXB968HB0dybbtWdMErKVB87nPzs7Cp7m9XNeNPN0NbvCCp8DpdFrZbFaFQmExCcZCTVIehpWFZrOp/f39cHsikZDv+/NPLAAAALbKVB0B9XpdR0dHfdsty5rqaZVt2yqXyyqXy0tfZxMr7sMPpb/+a+mb3+z8d8AylstgWZZarVZkW1CWs9lsZFsqlZLUmQceRJP3fT+8weMp8PqLUx7ilIVBej8XAAAAmNRUyweapqknT57oX/2rfxXZ3mq1dP/+/dif4/u+Hj58qHfeeUeGYajRaOjw8JAI6RjsjTekr3xF+vGPP9724ovS178uvfLK8tL1T+r1umzbDp/gGobRN+fbNE2lUik5jqPz83PZth3eIAbDxXkKvBnGlYc4ZeHg4CBy7IOAgQAAAMAspuoIyGaz+vKXvyzHcfSrv/qrkqQf/vCHOj4+jix7No5t2zo5OQmffiaTyUi0bCD0xhtSOi31dhK9+25nu+MsvTPANM2xw/h7o8gPmzbQi6fA62dceYhTFizLiky38jyPYIEAAACY2VQdAfl8Xs1mU/fv39fe3p6kztP9V199VWdnZ7E/p1wuq9lsyvO8sIFLIxd9PvywMxJg0EiRdlva2ZH+4A+k3/1d6c6dW0/evPEUGAHTNHVyciLHcdRqtSKrTwAAAADTmqojQOo8vSoWi5Ho2JNMCwiiZDcaDZmmKdM0lcvllMlkhnYGvP/++3r//ffDv29ubiRJz54907Nnzwa+J9g+7HXMbuF5/L3vSU+fSr/8y8P3+dnPpL/5G+k3f3Mxaeiy6KkrPAVGt0ErTgAAAACzmLoj4Pvf/74uLi7UaDT0ne98R5L0x3/8x7IsS7/+678+9v3dy2UFw2MLhYLu37+vq6urge85OzvT1772tb7tb7/9tl544YWR38eUg8VbaB5/85vj97m5kd56a3Fp+CfvvffeQj+fp8AAAAAAFmmqjoDz83N99atf1WuvvaZyuRxuv3//vmzbDjsG4uhefcAwDPm+37eUVuD09FT/6T/9p/Dvm5sbvfTSS/riF7+oe/fuDfz8Z8+eqVqtKpVK6e7du7HThfgWnsff+570O78zfr+/+ItbGRHwP//n/1z4d/AUGAAAAMCiTNURUCwWVa/X9dnPfjYyhPn3fu/3IkuljTJszrNhGOFogV7PP/+8nn/++b7td+/eHXsDGmcfzGZhefyFL0j7+53AgIOG5e/sdFYP+MIXbiVGwM7OzsK/AwAAAAAW5blp3vT06dNwSazum6InT57Enj8dxAXoven3fT8ySgDQnTudJQKlzk1/t+DvP/mTjQgUCAAAAACLNlVHQCaTUSaTCYP1SZ1h+rlcLvaIAKkTE+Di4iL823EcWZYVWVILkNRZGtBxpM98Jrr9xRdXYulAAAAAAFgXU00NKJVKymQyMgxDkvTgwQM1Gg1ls1m99tprsT8nnU6r1WqpWCxK6ow0IKgfhnrllc4Sgd/9rvTTn0qf+pT0+c/f+kiA3d1dPXr06Na+r91u67333tMnPvEJpiUsCHm8eOTx4q1THn/00Uf6wQ9+oM997nN67rmpnkkAAIAxdnd3h7429aoBlUpFnufpb//2byVNvnxgYJIRBIDu3JF+67eWmoTXX3/9Vr/v2bNneuutt/Tyyy8T52JByOPFI48Xb53y+ObmRru7u3r8+PHQYL8AAGBxpu4IkD6e5w8AAAAAANZDrPF4d+7ciSyZ9txzz+nOnTtj/z148EDf//73F5V2AAAAAAAwoVgjAl577bVIJP+48/gvLy+VyWT0gx/8YLrUAQAAAACAuYrVEfCf//N/jvz98OHDWB9+dHSkvb29yVMFAAAAAAAWYuoYAT/84Q9VKpXkeZ4k6V//63+tV199NRL0p1arKZ1Oz55KAAAAAAAwF1Ot2fOtb31LpmmqUqlob29Pe3t7+h//439ob29P/+f//J9wv4cPH+ry8nJuiQUAAAAAALOZakSAbdvK5/N67bXXIttzuZy+/OUv6/Hjx3NJHAAAAAAAmK+pOgJarZb+8A//sG97oVBQIpGYOVEAAAAAAGAxppoacHx8rCdPnvRt/+EPf0hMAAAAAAAAVlisEQGnp6d92377t39b2Ww2sq1cLuv4+Hg+KQMAAAAAAHMXqyOgXq/3bTs8POzbfnh4OJ9UAQAAAACAhYjVEfD2228vOh0AAAAAAOAWTBUjAAAAAAAArKeJ
OgJubm50enqqBw8e6M6dO7pz544+97nP6T/+x/+om5ubRaURAAAAAADMSeyOgL/8y7/UZz/7WVUqFT18+FDf+MY39Nprr+nhw4f6sz/7M+3t7emv/uqvFplWAAAAAAAwo1gxAp48eaJ0Oq1CoaBXX3217/VvfOMbsm1blmXJ8zz96q/+6twTCgAAAAAAZhdrRMBXv/pVZbPZgZ0AgUKhoC9/+cvK5/NzSxwAAAAAAJivWCMCXNcduIRgL9u29eDBg5kTBQAAAAAAFiPWiIB2ux3rw3Z2dmZKDAAAAAAAWKxYHQGWZelb3/rW2P3K5bIePnw4c6IAAAAAAMBixJoa8Nprr+no6Eimaerf/tt/O3Cf//bf/puKxaKazeZcEwgAAAAAAOYnVkeAaZq6vLzUF7/4RR0eHsqyLD148ECtVkvNZlOO48jzPF1eXuqzn/3sgpMMAAAAAACmFasjQOpMD2i1WrJtW5VKRYVCQVKnk8CyLNVqNe3u7i4soQAAAAAAYHaxOwIkyTAMlUqlRaUFAAAAAAAsWKxggQAAAAAAYDPQEQAAAAAAwBahIwAAAAAAgC1CRwAAAAAAAFuEjgAAAAAAALYIHQEAAAAAAGwROgIAAAAAANgidAQAAAAAALBF6AgAAAAAAGCL0BEAAAAAAMAWoSMAAAAAAIAtQkcAAAAAAABbhI4AAAAAAAC2CB0BAAAAAABsEToCAAAAAADYInQEAAAAAACwRegIAAAAAABgi9ARAAAAAADAFqEjAAAAAACALUJHAAAAAAAAW4SOAAAAAAAAtggdAQAAAAAAbBE6AgAAAAAA2CJ0BAAAAAAAsEXoCAAAAAAAYIvQEQAAAAAAwBahIwAAAAAAgC1CRwAAAAAAAFuEjgAAAAAAALYIHQEAAAAAAGwROgIAAAAAANgidAQAAAAAALBF6AgAAAAAAGCL0BEAAAAAAMAWoSMAAAAAAIAtQkcAAAAAAABbhI4AAAAAAAC2CB0BAAAAAABsEToCAAAAAADYInQEAAAAAACwRX5pmV/uuq5KpZJSqZRM01S1WtWDBw+UTqeXmSwAAAAAADbWUjsCfN+X67pyHEemacq2bToBAAAAAABYoKV2BEjSkydPZBjGspMBAAAAAMBWWHpHwCTef/99vf/+++HfNzc3kqRnz57p2bNnA98TbB/2OmZHHi8W+bt45PHikceLt055vA5pBABgk+202+32sr7ccRy1Wi0lEgm1Wi01m00VCoWh+//RH/2Rvva1r/Vt/9M//VO98MILi0wqAACYk1/84hf6d//u3+n6+lr37t1bdnIAANg6S+0I8DxPkmSapiSpXC6rWq2qUqkM3H/QiICXXnpJP/vZz4Y2JJ49e6ZqtapUKqW7d+/O+RdAIo8XjfxdPPJ48cjjxVunPL65udE//+f/nI4AAACWZKlTA4IOgMDx8bFyuZx83x8YN+D555/X888/37f97t27Yxs9cfbBbMjjxSJ/F488XjzyePHWIY9XPX0AAGy655b55Y7jRP4Obv6DkQIAAAAAAGC+ltYR4Pu+MplM5Kbf931J/SMFAAAAAADAfCytI8AwDOXz+chNf7lcVjqdZjlBAAAAAAAWZKkxAk5PT1UsFsO/nz59OjRQIAAAAAAAmN1SOwKCUQEAAAAAAOB2LDVYIAAAAAAAuF10BAAAAAAAsEXoCAAAAAAAYIvQEQAAAAAAwBahIwAAAAAAgC1CRwAAAAAAAFuEjgAAAAAAALYIHQEAAAAAAGwROgIAAAAAANgidAQAAAAAALBF6AgAAAAAAGCL0BEAAAAAAMAWoSMAAAAAAIAtQkcAAAAAAABbhI4AAAAAAAC2CB0BAAAAAABsEToCAAAAAADYInQEAAAAAACwRegIAAAAAABgi9ARAAAAAADAFqEjAAAAAACALUJHAAAAAAAAW4SOAAAAAAAAtshOu91uLzsR07q5udHu7q6ur6917969gfs8e/ZMb731ll5++WXdvXv3llO4Hcjj+H7/939f19fXE72n3W7rvffe0yc+8Qnt7OwsKGXbjTxePPJ48dYpjz/66CP94Ac/0Oc+9zk99xzPJOZtd3dXr7/++rKTAQBYYb+07AQA2+T6+lpvvvnmspMBANhgjx49WnYSAAArjm54AAAAAAC2CB0BAAAAAABsEToCAAAAAADYIsQIANaI4zhqtVqq1+vKZDKyLGvZSQIAAACwZugIAFaM7/tyXVfJZFKmaYbbG42GJCmbzcr3fd2/f19XV1fLSiaWYFjZAAAAACbB1ABghbiuK9d1ZVmWPM9TLpcLX2u1WqpWq5IkwzCUSCTCzgFsvlFlAwAAAJgEHQHAivB9X9VqVel0WoZhyLIspVIp2bYtSbIsS6VSKdy/1WopmUwuK7m4RePKBgAAADAJOgKAFRE88e1mWZbK5XLfvrlcTufn57eVNCzZJGUDAAAAGIeOAGBFpNNp1ev1yDbDMOT7vnzfD7c5jqNUKqV0On3LKcSyxC0bAAAAQBwECwRWSLlcViKRkNQZ+h+sCtBqtWQYhlzXDYeGNxoNGYYh0zTluq5831er1VI2m5XUGU7+8OHDvhtIrKdRZaNWq3H8AQAAEBsdAcCKSKVSKhQKkXn/QUA40zTleZ4ymUz4mu/7arfb8jxPiURCpmkqlUqFN4Ku64Y3jlhvo8qGJI4/AAAAJsLUAGAF2LatZDLZF/yvVquFT35N09TV1VX4r91uS5I8z1MymZTjOJH3V6tVpVKp2/sRWIhxZYPjDwAAgEnREQCsgGKxqJOTk77tcZaJCzoKLi4uIvvWajVWFdgA48oGxx8AAACTYmoAsGRBNPjem7ZGoyHf92MFBfR9X41GI7wpDN7f/TdWg+/7YbT/fD4/ct+4ZYPjD2yHSeoPAABGYUQAsAJM0+zbdnZ2plKpFOv9nudFPqPRaIR/9y47h+VyXVdPnz6NvX+cssHxB7bDpPUHAADDMCIAWDLLstRqtSLbHMeRpDDw2ziGYcgwjPDvi4uL8Cmy53nzSSjmIp1Oq9VqxVr2L27Z4PgD22GS+gMAgFHoCABWQL1el23b2t/fl9S5satUKrHfb5qmjo6OVCwWZZqmTk5OdHZ2pnK5HLszAaspTtng+MfTO3ICAABgW9ERAKwA0zRVKBRm+ozeaQSTdCTMi+/7ury8VKVSUbVajfWeYrEY/v/Tp0/78iF4vdlsSur/ndN85yjjPq9YLIZP333fX/g83bhlYxWOf2DaY2Lbtg4ODiR1lkQcFh8jlUoN/Nxx7z84OAhX21hVw35brzh5PCo/xp1XyxYnH4I8kDq/w/M8nZ+fR0bHjMqnTCajk5MTmaYZeY80eEpOHKtWfwAAMAwdAQDmotFoqFaryff9vuHsw2QyGaVSqfCpdblclm3b4Y1v9/9LUi6Xi9wgTPOds/yG4OYpSK/rusrlcuFNVLFYHDh/d39/f20b/JM+RZ/mmPi+r4cPH+qdd96RYRhqNBo6PDwceNPuOE5f3IM47++OmxDXbY8gGPTbBhmXx+PyY9x5NQ+z5F3cfLBtW7Zth9+Ty+WUyWR
i1w+NRiOcatMtnU5P1ZFG/QEAWCvtNXZ9fd2W1L6+vh66zwcffND+9re/3f7ggw9uMWXbhTyO70tf+tKyk7BwlUqlnUwmx+7XbDbbktpXV1fhtqurq3Db1dVV27KsyOv1er0tqd1sNqf6zriGfZ5hGJH0tNvt9jTVaKlUahcKhWmTd6uy2exU75vkmGSz2b78qFarfftdXV21S6VSX57HeX+hUJj4t0z726cx7LeNMiyPR+XHJOfVLKbNu0nywbKsyO8sFAptwzD69huWT4POwVKpNGGK+61C/bEN1xoAwGxYNQDAUgRB7LqH5Ab/X6vVwv92B7sLnvwtI1CW53nyfb9vCLE0WWR+13VVrVZVrVYHPo3cRuVyWel0Wp7nhXk5aOnDy8tLHR8fT/R+13VVLBZ1dnYmqfPUdRUDKA77bdMYl5/jzivXdeU4TrhMXfDa4eHhXNI3yiT5UK1WI0/KHz9+PNGSmb1TR1zX1dHRUez3T4L6AwCwapgaAGApum8+ehvHnufJsixdXV1FtgcN5mUEfBt282gYxkQdE5ZlTXSzsumCfA2G7pumGQ7x7s4n13UH5tu49wf/guHwg27Elm3Yb5vGuPwwDGPkeeV5nhKJhEzTjEzbcV1XiURiLmkcZpZ8cBxHvu9PHGQ14HleWO8sAvUHAGDV0BEAQLlcLtZ+h4eHc4tCb5qmLMuS67rhk7lxT8bOzs5UKpVW6mYukUjMJT7BtuoeGRIseVgoFHT//v3IDavv+zJNs++mKc77g5viVSo33Yb9tmnEzc9u3edVrVaTZVkqFovh+6XO0/dUKjVz+kaZJh+C4Hy+7yuTyUx9jAuFwlICJlJ/AACWhY4AYMl2dnZu5XvaI6KlLytieLValW3barVa4VNIafATf9u2dXJyMlNHhOM4sm07jJQ+D/NuxC+zPAzqEAoCmnVLpVJDI/pPq3tIdvCUNHhCHGcZxFHvr9VqkZvaQebx2x3H0cXFxcjvkaTT09MwPYta4nFUfnTrPa+C1y8uLiIBBWu1mjKZzMDvmkfeTZsPhmFEgo3u7e3pyZMnE3UINBqNsfssou6Q5l9/AAAQFx0BwJKNukHfBt03G8GTwN55uo7j6ODgYOYbJtM0x94QjnrvIMFTzHlZZnkY1CHUHdV8EYblnWEY8jxPjUZj5Lztce+X4j3NnsdvT6fTE3WQjPtt04iTH4Fh55Xv+2o0GpFOg96/u82ad9Pkg+/7Ojs70+npaXjTb1lW2OExyXEolUrhMovDzFJ3BO8fZN71BwAAcdERAGApUwOkzg1Ad+M6aMB3P80LpgsE3xsszTVN4zmZTE61LJikcGj5oGXRmLM7vWAeu+d5kbLg+76Ojo7UarXUaDTCchA8kS0WizJNU+l0euT7pU45Oz09ldS5+Z33aIZpxfltkxqXn4FR51VvGe9eenGe8QwC0+SD53kqFovK5XJhfRF0JE46PcB13bGBEGepOyTqDwDA6qEjANgwjuOo1WqpXq/3BVwbZp5PfIcNdfU8T47jRKJ8ZzIZlUqlMI2lUimSlkajoUajEUZAlzq/r7czYt7Da4d93unpqVzXDb9/UFrQMUk5KBQKuri4CG9cHceRZVnh371Ppsvl8kTv770pXhW9gd8G/bZB+RUYlsfj8mPceWUYRuRmuvuzFrHiwjT5kEwmlc/nIzfVQTp767xx9cOgm/NZUH8AANYBHQHAigmGtiaTyYkbp8Fc12w2K9/3RwYIm7egoX5xcaFGoyHbtvXgwYNIIMBSqRRp3JdKJTUaDXmep2azqVKpFFlN4OHDh/J9X7ZtR74r+Ixx3znv35DP51UsFsNlux4/frzw+AqzlIdlmKYcpNNptVotFYtFSdLTp09VrVb7Prt7Dr5t20qlUrIsa+z7C4VCeMxWZTRAr2G/bVB+jcvjUfkR57wyTVNHR0fhE/mTkxOdnZ0tLJ7BtPlwenoa/sbgt73zzjvh33HrB9M057IiwirWHwAADLPTXuMJyjc3N9rd3dX19bXu3bs3cJ9nz57prbfe0ssvv6y7d+/ecgq3A3kc36NHj/Tmm28Ofd11Xfm+HwY4q1QqEzUUXdeNvOfg4ECVSmUln4ZivFnLwzwsOkbAKtvm3z4r8m65xl1rAAB4btkJANDh+76q1Wo4R96yLKVSqb6ndqNYlhVpfLdaLToB1tQ8ysM83Pb3rZJt/u2zIu8AAFhtdAQAK8J13TBYViBYOm0auVxO5+fn80galmDe5WFa6zAdYVG2+bfPirwDAGC10REArIh0Oq16vR7ZFqz/HUTDjstxnIWs9Y7bM8/yAAAAAHQjWCCwQsrlchi0qtVqhdGvW61WGMU7CDRVrVZl27Zc11Wz2VQul5NpmnJdNxxK3mg0ZBgGT+fW1LjyMK4sAAAAAIPQEQCsiFQqpUKhEJnTn8vlJH08zLZcLuv4+FiGYYQ3f5VKRalUKlwxIJPJhO/3fV9rHA90q40rD+PKAh0BAAAAGIaOAGAF2LatZDLZF9ivVqtF1sROJBLhyADP88Ibw+6l0m5ruUAsTpzyEKcsBCMGWq2WTNPsW18dAAAA24mOAGAFFIvFvvngUucG7/T0NPy7e85/sJwcNk+c8jCuLHiep2q1Gq4iEazHDgAAANARACxZEBm+9+lvo9GQ7/sDA/55nidJ4RPhbjwFXm+TlodhZSGIFREwDEOu61IeAAAAwKoBwCoYNJ/77OwsfJrbq/eGLrh5DJ4Cp9NpZbNZFQqFxSQYCzVJeRhWFprNpvb398PtiUSC1QYAAAAgacU6AlKp1LKTgE324YfSX/+19M1vdv774YfLTpGkztrwrVYrsi14qp/NZiPbgnOkWq2G0eS7l5Mb9hQY6yNOeYhTFgbp/VwAAABsp5WZGuA4DjcsWJw33pC+8hXpxz/+eNuLL0pf/7r0yivLS9c/qdfrsm07fIJrGEbfnG/TNJVKpeQ4js7Pz2XbdniDGAwX5ynwZhhXHuKUhYODg8ixD6aKAAAAACvREeD7Pk+qsDhvvCGl01LvMnrvvtvZ7jhL7wwwTXPsMP7eKPLDpg304txaP+PKQ5yyYFmWbNsO//Y8j/gAAAAAkLQiUwMuLy91fHy87GRgE334YWckQG8ngPTxtj/4g5WZJjCrg4ODyN88Bd5epmnq5OREjuOoXC5HVp8AAADAdlv6iIBJoli///77ev/998O/b25uJEnPnj3Ts2fPBr4n2D7sdcxupfP4e9+Tnj6VfvmXh+/zs59Jf/M30m/+5sKT0x7UITFHPAVGt0ErTgAAAABL7wjwfV+macaax3x2dqavfe1rfdvffvttvfDCCyPfW61Wp00iYlrZPP7mN8fvc3MjvfXWwpPy3nvvLfTzu58Ct1otngIDAAAA6LPTXvQjyhHK5XIYBdv3fe3t7Y18YjpoRMBLL72kn/3sZ7p3797A9zx79kzValWpVEp3796d7w+ApBXP4+99T/qd3xm/31/8xa2MCHjllVf053/+5wv/HgDA9nr06JHefPPNZScDALDCljYioNFo6OjoaKL3PP/883r++ef7tt+9e3
fsDWicfTCblczjL3xB2t/vBAYc1Mm0s9NZPeALX5Du3Fl4cnZ2dhb+HQAAAAAwytI6AlqtlhqNRrhkYLPZlCQVi0WZpsncVszHnTudJQLT6c5Nf3dnQHBT/id/ciudAAAAAACwCpbWEWBZViSIWaPRULlcVj6fX1aSsKleeaWzROBXviL9+Mcfb3/xxU4nwJKXDgQAAACA27T0YIGS5DiOLi4uJEm2bSuVShHpHPP1yivS7/6u9N3vSj/9qfSpT0mf//ytjwTY3d3Vo0ePJnpPu93We++9p0984hNMLVgQ8njxyOPFW6c8/uijj/SDH/xAn/vc5/TccyuxkvFG2d3dXXYSAAArbiU6AtLpNFMBsHh37ki/9VtLTcLrr78+8XuePXumt956Sy+//PLqxWDYEOTx4pHHi7dOeXxzc6Pd3V09fvx4aLBfAACwOHTDAwAAAACwRegIAAAAAABgi9ARAAAAAADAFqEjAAAAAACALUJHAAAAAAAAW4SOAAAAAAAAtggdAQAAAAAAbBE6AgAAAAAA2CJ0BAAAAAAAsEXoCAAAAAAAYIvQEQAAAAAAwBahIwAAAAAAgC1CRwAAAAAAAFuEjgAAAAAAALYIHQEAAAAAAGwROgIAAAAAANgidAQAAAAAALBF6AgAAAAAAGCL/NKyEzCLdrstSbq5uRm6z7Nnz/SLX/xCNzc3unv37m0lbauQx4tF/i4eebx45PHirVMeB9ft4DoOAABu11p3BPz85z+XJL300ktLTgkAAJjUz3/+c+3u7i47GQAAbJ2d9hp3x3/00Uf6yU9+ok9+8pPa2dkZuM/NzY1eeukl/ehHP9K9e/duOYXbgTxeLPJ38cjjxSOPF2+d8rjdbuvnP/+5Pv3pT+u555ilCADAbVvrEQHPPfecXnzxxVj73rt3b+UbRuuOPF4s8nfxyOPFI48Xb13ymJEAAAAsD93wAAAAAABsEToCAAAAAADYIhvfEfD888/rv/yX/6Lnn39+2UnZWOTxYpG/i0ceLx55vHjkMQAAiGutgwUCAAAAAIDJbPyIAAAAAAAA8DE6AgAAAAAA2CJ0BAAAAAAAsEXoCAAAAAAAYIv80rITsCi+7+vy8lKVSkXVanXZydlYxWJRktRsNiVJpVJpmcnZKEEZljr563mezs/PZRjGchO2wVKpFPXFnLmuq1KppFQqJdM0Va1W9eDBA6XT6WUnbaPYtq2DgwNJUiKRIH8BAMBIG9kR0Gg0VKvV5Pu+Wq3WspOzsWzbVqFQCP/O5XLcSM2RbduybVumaUrq5G8mkyF/F8RxHLmuu+xkbBzf9+W6rhzHkWmasm2bm9Q58n1fDx8+1DvvvCPDMNRoNHR4eCgWBAIAAKNs5NSAZDKpbDYb3kBh/nzfV6PRkO/74bZcLifXdeV53vIStkE8z5PjOOHfBwcHqtVqS0zR5qLTcLGePHmidrutZrOpbDa77ORsFNu2dXJyEo4USiaTdBYCAICxNrIjALejVqtFbvqDjpfuzgFMr1qtKp/Ph38/fvxYlmUtMUWb6/LyUsfHx8tOBjCxcrmsdDotz/PCES3UEwAAYJyNnBqAxTMMQ1dXV5FtQSOUkRjz5ziOfN9XpVJZdlI2juu63Dgt2OXlpRKJhFqtlprNZmRKEaYXdMQ2Gg2ZpinTNMMpRJRpAAAwCh0BmJuzszOVSiWC2c1REDDQ931lMhnydgF835dpmoxkWZBkMinp4w7CcrmsTCZDp9YcBB0BhmGE+VwoFHT//v2+jloAAIBuTA3AXATzVJn/O1+GYSibzYZTBPb29rhhnaNgWDUWJ3hSHTg+Pg5HuGA+jo6Owv83DCMM0AgAADAMHQGYmeM4Ojg4iMxnx2x835dt25GbJcuyaODPUaPRiNxAYTG6A15KCke1EFR0dsOmYRmGQf4CAICRmBqAmQQ3pcFIgCD6OnECZuN5norFonK5XHjjFHQKMD1gPlqtlhqNRliGm82mJKlYLMo0TUYKzEEwpaXZbPYFE6WOmF0w2sLzvHBqgNTJYzq5AADAKBvdEcByYIvVaDTUaDTCiNVS5+kf0wNml0wmlc/nIzdLFxcXSiaTBAGbE8uyInnZaDRULpcZ2TJHhmH0leNgOgYdWvNRKBTCukHq1MGWZUU6BgAAAHrttNvt9rITMW/B+usXFxdqNBrK5/N68OABT/jmyPd93b9/f+A83w0sUkvh+77K5XL4dxBtnRuo+QvqC8dxlM/nlUql6HCZk95y/PTpU1YNmLNyuRzWxeQvAACIYyM7AgAAAAAAwGAECwQAAAAAYIvQEQAAAAAAwBahIwAAAAAAgC1CRwAAAAAAAFuEjgAAAAAAALYIHQEAAAAAAGwROgIARHiep0wmo729Pe3t7SmTycjzvL79UqmUbNse+jnFYlE7OzsLS+fh4aFyudzCPh8AAADYVHQEAAi5rqvDw0M9ePBA9Xpd9Xpdpmnq8PBQrutO9FmWZalUKi0opdLp6akymczCPr+b4zhKpVK38l0AAADAov3SshMAYDX4vq9UKqVKpaJ0Oh1uLxQKOjg4UCaT0ZMnT2QYRqzPSyaTSiaTM6fLdV3lcjk1m83I9u40Lopt2yqXy0okEgv/LgAAAOC2MCIAgKTOTW8ymRx4g53NZpVIJHR2draElC1PoVDQ1dXVyCkQAAAAwLqhIwCApM6Td8uyhr6eTqf7pgf4vq9cLqe9vT0dHBzIcZzI5/XGCOjet1wuR14rFos6ODjQzs5OOBUhk8kolUrJ8zzt7OxoZ2dHvu9LisYoyOVyfdMEGo1G5PtHfTcAAACwTegIACCpEyTwwYMHQ18/ODhQo9GIbLu8vFQul9OTJ0+UTqeHBhaUFL725MkTVatV2bYdfl4ul9PFxYUqlYqurq5UKBTk+74qlYoqlYpM01S73Va73R44NSGTyUQ6ISSpVCqFoxtGfTcAAACwbYgRACDUarWGvhY8ie+WzWbDOACFQkGO46hUKqlQKET28zxPjuPo6upKhmHIMAwVCgVdXFzINE2Vy2U1m02ZpilJI0cmDGJZlgzDkOM44c3/5eWlzs/PR373PGIYAAAAAOuGjgAAkiTTNPsC8nXrvlEfxrKsgSMCgqfv9+/fj2w/OjqS67oyDGPsZ49zfHysi4sLpdNpNRoN+b6vdDodjhQY9N0AAADANmJqAABJnZv43uH13S4vLyd+Ut8tmUzq6uoq8q9arU79eb1yuVyY/qBD4La+GwAAAFgndAQAkNQZ2u95norFYt9rtm3L9/2+If+9XNcdGGcgmUyGT+kHveb7/tDYAnElk0kZhiHXdeU4jnK53NjvBgAAALYRHQEAJEmGYahSqci2bdm2Lc/z5HmecrmcisWiqtVqX6C+crkc3mTncjl5nqdsNtv32aZpKpvNRoIJOo6jYrHY95rv+3IcJ1wRwDTNcLvruiM7DLLZbNihEYxeGPXdAAAAwDaiIwBAKJ1Oq9lsyvM8HR4e6vDwUK1WS81ms29agGmaOj4+1tnZmfb29lSr1VSv1wdG9Zc6UfyTyaQODw+1t7enUqkUfmbw/6lUK
nzt5OREUueJfjKZ1P3798eOSDg5OZHrun2dEaO+e5RyuaydnZ2wk2NnZ0cHBwdj3wcAAACssp12u91ediIAbB7XdZVKpUQVAwAAAKwWRgQAAAAAALBF6AgAMFeu68r3fVUqFSWTyWUnBwAAAEAPOgIAzFWpVNLe3p5c19X5+fmykwMAAACgBzECAAAAAADYIowIAAAAAABgi9ARAAAAAADAFqEjAAAAAACALUJHAAAAAAAAW4SOAAAAAAAAtggdAQAAAAAAbBE6AgAAAAAA2CJ0BAAAAAAAsEXoCAAAAAAAYIv8/1iRqaI1PNOUAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "epde_search_obj.visualize_solutions()" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "ea85370e", + "metadata": {}, + "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 21-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 22-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 23-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 24-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 25-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 26-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 27-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 28-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : 
processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 29-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 30-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 31-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 32-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 33-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 34-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 35-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 36-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 37-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : 
processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 38-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 39-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 40-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 41-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 42-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 43-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 44-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Multiobjective optimization : 45-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 46-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th 
weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 47-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 48-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 49-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 50-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 51-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 52-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 53-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "Multiobjective optimization : 54-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "The optimization has been conducted.\n", "\n", "\n", "0-th non-dominated level\n", "\n", "\n", - "0.0 * d^2u/dx0^2{power: 1.0} + 
-0.11852093259823644 * t{power: 1.0, dim: 0.0} + 0.36577252189514686 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.9999997461454138, dim: 0.0} + -0.980316863474351 * u{power: 1.0} * sin{power: 1.0, freq: 2.000000141169946, dim: 0.0} + 0.0 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + 1.2254264146899703 = du/dx0{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0044595328173458}} , with objective function values of [2.91933544 3.5 ] \n", + "0.0 * u{power: 1.0} * cos{power: 1.0, freq: 1.999999655208078, dim: 0.0} + -0.21710447417313278 * t{power: 1.0, dim: 0.0} + 0.21934828166108633 * u{power: 1.0} + -1.0624101924873819 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999997979374395, dim: 0.0} + 0.3259812785356608 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.9999995208876964, dim: 0.0} + 1.2864733879295016 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.002922984678908907}} , with objective function values of [2.84149365 4. ] \n", "\n", - "-0.2055064934951428 * du/dx0{power: 1.0} + -0.0035309324905232475 * d^2u/dx0^2{power: 1.0} * t{power: 1.0, dim: 0.0} + -0.779367282745194 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.999999554309517, dim: 0.0} + -0.2876572777322552 * u{power: 1.0} + 2.0086219620617323 * u{power: 1.0} * cos{power: 1.0, freq: 2.0000004333155865, dim: 0.0} + -1.202721068333836 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.999999518103412, dim: 0.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0020695345959820125}} , with objective function values of [0.65309385 7.5 ] \n", + "0.0 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.00000043981936, dim: 0.0} + -0.9918493622898968 * d^2u/dx0^2{power: 1.0} + 1.4857876345926648 * t{power: 1.0, dim: 0.0} + -3.9448922927621832 * u{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 2.0000002504454213, dim: 0.0} + -0.02862032765756233 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999996264795334, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.006608472868227343}} , with objective function values of [0.40273103 4.5 ] \n", "\n", - "-3.951218144289313 * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999995807630981, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000378681952, dim: 0.0} + -0.9900591677672463 * d^2u/dx0^2{power: 1.0} + 1.4766602719611732 * t{power: 1.0, dim: 0.0} + 0.014961528346664596 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999995886960809, dim: 0.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004122473023559842}} , with objective function values of [0.80876299 4.5 ] \n", + "-0.996179464616404 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000003761978054, dim: 0.0} + 
-3.9513011254968484 * u{power: 1.0} + 1.4811871152361153 * t{power: 1.0, dim: 0.0} + 0.0 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000261633951, dim: 0.0} + -0.03401223670958213 * u{power: 1.0} * sin{power: 1.0, freq: 2.000000206562423, dim: 0.0} + 0.011051213359916434 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.003959167640051569}} , with objective function values of [0.29622773 5.5 ] \n", "\n", - "-3.973456847439746 * u{power: 1.0} + 1.4807802528320022 * t{power: 1.0, dim: 0.0} + 0.0 * d^2u/dx0^2{power: 1.0} * sin{power: 1.0, freq: 1.999999729438855, dim: 0.0} + -0.9922888819896745 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0000003247686093, dim: 0.0} + -0.02489648279447621 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999998567712671, dim: 0.0} + 0.04836827440130382 = d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0053721087732711585}} , with objective function values of [0.78069956 5.5 ] \n", + "-4.002093298088698 * u{power: 1.0} + 0.051016415753568856 * du/dx0{power: 1.0} * cos{power: 1.0, freq: 2.000000226917959, dim: 0.0} + 1.5008156389914016 * t{power: 1.0, dim: 0.0} + -0.999100526319761 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999997491460861, dim: 0.0} + 0.0 * du/dx0{power: 1.0} + -0.03723715753609818 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004961614766111451}} , with objective function values of [5.53360803e-03 6.00000000e+00] \n", "\n", - "-3.9674622737518246 * u{power: 1.0} + 1.4800804249030497 * t{power: 1.0, dim: 0.0} + -0.9950430233227409 * d^2u/dx0^2{power: 1.0} + 0.02943784296981477 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999995807630981, dim: 0.0} + -0.08104293233348653 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999998567712671, dim: 0.0} + 0.05682269585623731 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999997453694336, dim: 0.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.003101475166935847}} , with objective function values of [0.74033353 6.5 ] \n", + "0.0 * t{power: 1.0, dim: 0.0} * du/dx0{power: 1.0} + -0.11630001772518705 * t{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} + 0.3720190828680421 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.0000002393218383, dim: 0.0} + -0.965303039186131 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999997979374395, dim: 0.0} + 1.219005892018271 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0052972615966122944}} , with objective function values of [2.87906607 3.5 ] \n", "\n" ] } ], "source": [ - "t_max = 160\n", - "t_train = t[:t_max]; t_test = t[t_max:] \n", - "x_train = solution[:t_max, 0]; x_test = solution[t_max:, 0]\n", - "\n", - "epde_search_obj = 
epde_discovery(t_train, x_train, True)" + "epde_search_obj.equations()" ] }, { "cell_type": "code", - "execution_count": 9, - "id": "d2d363bf", + "execution_count": 16, + "id": "9933b8cb", + "metadata": {}, + "outputs": [], + "source": [ + "correct_eq = epde_search_obj.get_equations_by_complexity(4.5)[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "72c9adbc", "metadata": {}, "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} = -1.185\\cdot 10^{-1} t + 3.658\\cdot 10^{-1} t \\cdot cos^{1.0}(2.0 x_{0.0}) + -9.803\\cdot 10^{-1} u \\cdot sin^{1.0}(2.0 x_{0.0}) + 1.225 \\end{eqnarray*}$\n", - "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} \\cdot sin^{1.0}(2.0 x_{0.0}) = -2.055\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} + -3.531\\cdot 10^{-3} \\frac{\\partial ^2u}{\\partial x_0^2} \\cdot t + -7.794\\cdot 10^{-1} t \\cdot cos^{1.0}(2.0 x_{0.0}) + -2.877\\cdot 10^{-1} u + 2.009u \\cdot cos^{1.0}(2.0 x_{0.0}) + -1.203 \\end{eqnarray*}$\n", - "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} \\cdot sin^{1.0}(2.0 x_{0.0}) = -3.951u + -9.901\\cdot 10^{-1} \\frac{\\partial ^2u}{\\partial x_0^2} + 1.477t + 1.496\\cdot 10^{-2} \\end{eqnarray*}$\n", - "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} = -3.973u + 1.481t + -9.923\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} \\cdot sin^{1.0}(2.0 x_{0.0}) + -2.49\\cdot 10^{-2} u \\cdot sin^{1.0}(2.0 x_{0.0}) + 4.837\\cdot 10^{-2} \\end{eqnarray*}$\n", - "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} \\cdot sin^{1.0}(2.0 x_{0.0}) = -3.967u + 1.48t + -9.95\\cdot 10^{-1} \\frac{\\partial ^2u}{\\partial x_0^2} + 2.944\\cdot 10^{-2} t \\cdot sin^{1.0}(2.0 x_{0.0}) + -8.104\\cdot 10^{-2} u \\cdot sin^{1.0}(2.0 x_{0.0}) + 5.682\\cdot 10^{-2} \\end{eqnarray*}$\n" - ] - }, { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABIkAAAGyCAYAAABgN5SRAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAACCF0lEQVR4nO3dT4wj533n/0/PZCDAgqerOZefEwmrKa72nLB7FvgdBMSZohYQVhNEIbsBL3z6ecjdXBJ4sV3uXBa+bJvcHJJTTLb3JOCX7WFZMJSFgIhl/wL48DsMSXjPCmu8cBz/Djvs6jYUQBpI/B3oqmHxXxXZ/N/vFzCQulgkHz71PE9VPfU832en2+12BQAAAAAAgBvt1qoTAAAAAAAAgNWjkwgAAAAAAAB0EgEAAAAAAIBOIgAAAAAAAIhOIgAAAAAAAIhOIgAAAAAAAIhOIgAAAAAAAIhOIgAAAAAAAEj6rVUn4Dq+/PJL/dM//ZO++tWvamdnZ9XJAQAACXS7Xf3617/Wb//2b+vWLZ5XAQAArIuN7iT6p3/6J73++uurTgYAAJjBL37xC7322murTgYAAAB+Y6M7ib761a9K6l1k3r17d8WpWZ4XL17o448/1ttvv607d+6sOjlri3yKRx7FI4/ikUfJkE8vXV1d6fXXXw/P4wAAAFgPG91JFEwxu3v37o3rJPrKV76iu3fv3vgbjUnIp3jkUTzyKB55lAz5NIyp4gAAAOuFQAAAAAAAAACgkwgAAAAAAAB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEB0EgEAAAAAAEDSTrfb7a46EbO6urrS7u6uLi8vdffu3VUnZ2n+3b/7d/rVr36lV199VTs7O6tOztrqdrv69NNPyacJyKN45FE88igZ8umlL7/8Up988onefPNN3brF8yoAAIBV2N3d1fvvvx/Z9lsrSguu4erqSj/5yU9WnQwAAAAAALChHj16NLSNx3cAAAAAAACgkwgAAAAAAABMNwMAAGM4jqNOp6Nms6l8Pi/LsladJAAAACzQyjuJqtWqfN+XYRhqt9s6OTmRYRirTtbW8X1frusqk8nINM1VJwfAglHnkdS4stJqtSRJhUJBvu/r/v37uri4WFUyAQAAsAQrnW5WLpd1eHio4+NjFQoFnZyc6PHjx6tM0lZyXVeu68qyLHmep2KxuOokAVgg6jySmlRWOp2O6vW6JMkwDKVSqbDjCAAAANtppZ1E9Xo9MmrIMAz5vr+y9Gwj3/dVr9eVy+VkGIYsy1I2m5Vt26tOGoAFoM4jqbiyYlmWKpVKuH+n01Emk1lVcgEAALAEK+0kMgxD2Ww27BjyPI9pEXMWPCXuZ1mWqtXqilIEYJGo80hqmrJSLBZ1dna2rKQBAABgRXa63W53VV/u+7729/fleZ6Oj4+VTqdVKBTG7v/ZZ5/ps88+C/++urrS66+/rv/9v/+37t69u4wkr4U/+qM/0v/4H//jWp+xs7Oji4sL4j8BNwR1HkkNlhXHcSRJuVxuhakCAADAvD169EgffvhhZNtKA1cbhiHbtlWv11Uul2VZlg4PD8fexJyenuq73/3u0PaPP/5YX/nKVxac2vXxz//8z1PtX61WlUqlJPWmCwSr03Q6HTUaDfm+r06nE3bQ+b6vhw8fqtlszjfhAJaCOo+kJpUVwzDkum44Fa3VaskwDJmmKdd1KUcAAABbaKWdRLZtK5vNqlaryfM85fN57e/vq91uj9z/5ORE3/72t8O/g5FEb7/99o0aSTTNkP9sNqtSqRSJI9EfmDSVSsk0TWWz2fBC33Xd8KYBwGahziOpSWXFNM3wvBzwfV/dblee51GOAAAAttTKOok8z5Pv++FTS9M01Ww2tb+/L8dxRg5rf+WVV/TKK68Mbb9z547u3Lmz8DSvi52dnUT72batTCYzFGi00WiEK9lYlqVyuRzZp16vK5vNzjXNABaPOo+k4sqK1Dsvj1rynnIEAACwvVYWuNrzvJHTyliqeX7K5bKOjo6GtgfLHAc3Aufn55F8bzQarGADbCDqPJKKKyuTUI4AAAC210oDVwdTzfo7i4rFYmTJ3Umurq60u7ury8vLGzXd7N1339Xf/u3fTtzHdV1ls1kNHt5Wq6X9/f1wu+/72tvbi+y3s7Mz9L515/t+uCLP8fHxyH2C4KudTkemaYY3OsC6SlKuAzetzmMyx3FkmqYajYYkRRaFSFpWJqEcYZJp2q51xDUFtsmm10eMRjuFeVm7wNW1Wk2np6e6d++eDMOQ7/sqlUqrTNJWMU1zaNvp6WmkE87zvMh+rVYr/Nt13Y1pTFzX1fPnz3Xv3r2Rr3uep3q9Hv72bDa7Mb8NN1dcuR50k+o8xvN9X6enp2o2mzJNU3t7e0MrhyYpK5NQjjDJtG3XuuGaAttk0+sjRqOdwiKtfHUzOoUWw7IsdTqdyLagN7n/ZsEwjMhIrvPz83C6gOd5i0/onORyOXU6Hfm+P/L1YIWeQLBqz6jG0vf9jV4mfNPTj5fiynW/m1bnMZ5hGOEKY0H8oH5Jy0rcd1COMM40bdc64pripU1PPza/PmI02qmXNj3962hlMYmweM1mU7Ztq1wuq1wuq9PpqFarRfYxTVMHBwcql8tyHCeMUVGtVhPfLMyT7/sLOYm12+1IT3sqlRr5PbZtb3wjU61WuUm7oTaxzmNxqtWqTk9Ph8qAlKysTLKMckQ7tt02+fhyTQFg3dFO4TpWOpIIi2WaZqKRWoPTC6a5UZi3x48f6+joaOTqdvM2+CS9Wq0OBWwtl8uSeg2tNJxXo5TL5bCx9X1/6fO/j4+Pp4rttSrzzlvXdVWpVJTNZmWapur1uh48eBCWpbjXZ+H7vp48eaJaraZ6vT5VehdhE+v8tII8l3plx/M8nZ2dTbzAiTv2ST4z7ljP+jsWWXYKhYJM05Rt20PHPGlZmWTR5SidTq9djKN8Pq+joyOZpjlU5kZN4Uv6nqA9lKTnz59PPDbZbPbaZXAd2q51PL7XwTXF6sySr9VqNRx90G63dXJyEuZzXJ2dpR2YZB3q4yosoj4keT0Q19ZuI9qp1ZnlOjLuWEzK91mul/vRSYS1sqib1XQ6Hek9DwK4BTzPU7PZjDwBt207cvIoFouxF+dBZQ4+x3XdlTRa+Xxe5XJ5bS8kFpG3vu/Ldd0wYK9t25EOoLjXp9VqtdRoNOT7/tBJN0l6y+Wynj9/PvS+e/fure1xiz
MYp2YRbNuWbdvh9xSLReXz+YllJ+7Yx31m3LGe1jLKTnDzY1mW8vm88vn8RsUi6I9xNI1Fl8FWqxVOzeuXy+XGnr/i3pPP55XNZsPjXa1Wh9rIgOM4cl332r9h1W3XrMd3XXBNkcyyzgmz5GuhUIjcXD1+/Disw3F1dpZ2YJx1qI/zMO2xXkR9iHt9mrZ2G9BOJbOMdmqW68i4YxGX77NcL0d0N9jl5WVXUvfy8nLVSVmqf/tv/+2qk7CWKpVKt1QqRbZdXFx0u91ut91ud3O5XLg9k8lE9js+Pu622+3I+yzLCt/f7Xa7zWazKymy3yDDMCLv6Xa73VVVs8HfuC4Wlbe1Wm3o9X5xr8+qVquNzOt5lYVR5XpdFQqFhX+HZVmR/CiVSl3DMCa+J+7YJ/3Mccd6VosqO5VKpXt8fBz+bZpmt9lszpzOVSiVSjOVp0WXwVF1sVKpzPyedrvdlRQ53hcXF0Pbgu2VSmVu55RVtl2zHt9l4poiapa2b9HHeNZ8tSxr4ra4ej5LOxBn068lpjnWi6oPk16fpq3dJLRTUevYTvVLeh2Z5FjE5fs018vvvvvu0DZiEmFpfN8P41e4rht5Gho85Q+WcgwEvfye54X72LY9tJ/ruqrX66rX65GnO/v7+/J9X6Zp6ujoKPyOk5OTofcP9iI3Go3I/Nbg9XExkzzPGxs4rf+3Oo4jx3FULBbleV7kN86TaZpqtVpz/cx5WVTerot5pXdcub7J6vV65CnR06dPrz1CZhGfOat5lJ3Dw0M9ePBAruvKtm0Vi8UwqPS6c11X5XJZp6enknpPytYpzsDg6EPXdXVwcDDze4LfNhhcVOq1k/2ePHmiw8PDWZKd2KLbrkUc3/7risF2slwuq1qthv/6xV2TcE0Rta7XFNPmq9SrY9lsNtxncCRBXD2fpR2YxTKuJUZdf/u+r/39/ZnTncS860OS16XJbe0i84J26ma3U7OYdCyS5Pu1r21ju7LWGCOJNkuhUAh7P/t7t9vtdrfZbHbb7XbXNM1w/2BbqVTqZjKZSC923MiBabTb7ZFPlQbVarWJTxzq9frInnPDMLq1Wq3b7fZ6/IP3FwqFMA8sywr3mZdNGoEyj7yt1WrdSqUS/rd/JEWS16+T9sGnAknSu42WPTKgVqsNPWkZt1/SYz/pM5cxkuimlp1Bk9qDSZZZBtvt9tSjBwbfM+rpdrfb+/39+9Xr9W673Q6ffM/DKsvfrMe338XFReTaoF6vR36PZVmR64ZMJhMZUTfumuS6uKZ4aRWjxeLytdvtlR3TNLuSusfHxxPrcVw9n6UdGGUV9XHc9XdwHpzGdY/1detD3Otxbe0886If7RTtVL/rXEf2H4tp24a46+VRI4mISYSlaTQa4fxK0zTDedvBEs3VajXytLvT6SiTyajdbsuyrERPh2YR9LbHOT09VaVSmXoFgFQqFc4/TaVS4fs9zwuDxc0jEO6o7w0CnY0zGKxunP39/YWufDWPvA3KTnAsq9Wq8vl8WM7iXl+G/vRidkEwPt/3lc/nY8tNkmM/7Wcu200qO0G8mnU7BoNKpdLUMRcG32OapizLkuu6kSD7g4Lz1LzPf0nNs/zN6/jath0GD5akg4ODsE7btq1MJhM5tx8cHMh13bA9GHdNcl03+ZpiHSTJV8MwZNu26vW6yuWyLMvS4eHhyPfE1fNZ2oHrmld9DK6/y+Vy5Pq7Xq8rm81e+/Oncd36MO59wetxbe2i8oJ2inZqXpIci8G24TrXtnQSbYmdnZ2lfE93zEokwTSwSRWzVCopn88rnU4rk8mo2WxKUjj0rVKpRAJ0Bdtd1400ivMe1ut5XmylCRr5WTpK+itr/xDlRqMxsrEPhoz2n9Rm2cc0TZ2fn09M2zwubBzHif0eSTo5ORk55WVeeTt4sjs8PFSxWAyHY8a9PihJmb5OeudhlfV+VAdjEDSvXzabHRsgfNayYxhGJPDk3t6enj17NrYeJzn2037mOIsoN9J8y866ny8ajUai6XHXLYPXabtmGc4+7j31el22bavT6SiVSoXltb9Tc5r2cd3brqTHd9Lv8H1f1Wo18pphGGF9LZfL4TVGwPM8pdPp8O9x1yTXdVOvKVZ5TggkzVfbtpXNZlWr1eR5nvL5vPb394fKWlw9T9IOrHN9DI7z+fl55Pq70Wgon8+Pfd88jnW/edWHuNcntbWz5gXt1Gi0U+PbqVkkPRaD9eFa17YzjXdaE0w3Wx/NZjPxEMhms9k1TTMyHK7dbo+cQjZqaH0ul5vL0N5ArVaLnX6S5PuCoayDJHXr9frQvqN+b7vdjgx7HDUUNMk+3W5vCOi6BwadZ96OGl4pKRy2G/f6oKRletTQ0WnKwjZZRpDS4+PjyHDZIK8nDVmedOyn+cwkw4SnaQtvatmJy6PrtPHLavMKhcJMQ9qTvKc/mGqz2Yy0UUmmm61725X0+E76HfV6fey082azOfK1ce39qGuS6+Ca4qVlT+NImq+j0pXJZIbKQFydTVKn170+jmpTZrk9nPVYz6s+zJJXg4GrZ8kL2inaqWnMMt1s1LGIy/dpr5cJXI2FyWQyE4dAptPpMPhWJpNRsViM9GI6jhMG5Owf/tloNIZ6il3X1eHhoVqt1lyG3pumOTZwWpCWoBc2CBY27nMMwxj5+qjf0L8t+B7XdYeC6g1OPUiyT5DW/qcRoxSLxUT/BgPpzcM88zYYRtn/elA2gikak14fJa5MTzJNWUBynuepXC4PDaWVNPapSNyxn+UzJ7lOuQnStO1lJy6PWq1WOFp0XQO2D7bD13nP4GiEYDqEYRjqdDphsOdyuSzbtiW9DII6yrq3XUmPb9zvGNV2B8sLp1KpyHbHcZTJZMKnunHXJNdxk68pVmWafB03gmLcCINJ5SJJO7Du9XEwaHcwHVRa/MIg86wPSfJqUlsrzZYXtFO0U4s07ljE5fs8rm3pJMLC+b6vXC4XaSgHI6w/ffo0XGmiv9Gs1+uRoXqe54VzWxuNxlway3ENZavVUqvVUiaTked5YTT+IH1BBex3cnIyFM0/qNiO44Tzmuv1evg5vu+HFbfdbuvevXvh+1Op1FBHWJJ9gvTF5U+lUkn0b97xiOadt4Zh6Pj4OFLGqtVqePKPe/06xg11npRezCaTyQwdx/Pzc2UymbA9GSw7ccc+yWcG5j1dcFvKTrBySrFYnMtNhed5a78a2+DNRP/2wbYr7j35fD6Sb/1Try3L0vHxcfgvuJE9Pj5ONI1jklWVv3kc3+AiePBzg5uQwd9WqVTCG7kk1yTXcZOvKVZh2ny1LGvkQ8ZmszlUp8bV2aSvT2NV9bF/+pP08vwnaWzHwDzMuz4keX1SWyvNPy9op2inBo2r56PyNe5YTMr3aa5tx9n5zdCkjXR1daXd3V1dXl7q7t27q07O0rz77rv627/924n7+L4fBj6b1wnsOvqfFnY6naEAgY7jjJxjGoxk6b+gLBaLyufzMk1zbr8tm81GAqj5vq/79++PbICCKlOtVlUqlYbmIZfL5TBdT58+DU9ArVYrXG7SsqxwTrz0c
<... remainder of base64-encoded PNG data omitted: the figure rendered by the removed epde_search_obj.visualize_solutions() cell ...>\n",
      "text/plain": [
" + "Sequential(\n", + " (0): Fourier_embedding()\n", + " (1): Linear(in_features=21, out_features=128, bias=True)\n", + " (2): ReLU()\n", + " (3): Linear(in_features=128, out_features=128, bias=True)\n", + " (4): ReLU()\n", + " (5): Linear(in_features=128, out_features=128, bias=True)\n", + " (6): Tanh()\n", + " (7): Linear(in_features=128, out_features=1, bias=True)\n", + ")" ] }, + "execution_count": 17, "metadata": {}, - "output_type": "display_data" + "output_type": "execute_result" } ], "source": [ - "epde_search_obj.visualize_solutions()" + "import epde.globals as global_var\n", + "global_var.solution_guess_nn" ] }, { "cell_type": "code", - "execution_count": 10, - "id": "925ef946", + "execution_count": 20, + "id": "e0e52b3f", "metadata": {}, "outputs": [], "source": [ - "system = epde_search_obj.get_equations_by_complexity(4.5)[0]" + "correct_eq_text = '-3.968145633247173 * u{power: 1.0} + 1.49553293285314 * t{power: 1.0, dim: 0.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} + -0.9932499157165215 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.000000113550314, dim: 0.0} + -0.021589552690019387 = d^2u/dx0^2{power: 1.0}'" ] }, { "cell_type": "code", - "execution_count": 11, - "id": "e9580909", + "execution_count": 43, + "id": "a84d1bde", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\"-3.951218144289313 * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999995807630981, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000378681952, dim: 0.0} + -0.9900591677672463 * d^2u/dx0^2{power: 1.0} + 1.4766602719611732 * t{power: 1.0, dim: 0.0} + 0.014961528346664596 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999995886960809, dim: 0.0}\\n{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004122473023559842}}\"" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" + "outputs": [], + "source": [ + "from functools import partial\n", + "from copy import deepcopy\n", + "import torch\n", + "\n", + "# from epde.operators.common.fitness import SolverBasedFitness\n", + "from epde.operators.utils.operator_mappers import map_operator_between_levels, OperatorCondition\n", + "from epde.operators.utils.template import add_base_param_to_operator, CompoundOperator\n", + "\n", + "\n", + "from epde.interface.solver_integration import SolverAdapter\n", + "\n", + "from epde.operators.common.sparsity import LASSOSparsity\n", + "from epde.operators.common.coeff_calculation import LinRegBasedCoeffsEquation\n", + "from epde.structure.main_structures import SoEq, Equation\n", + "\n", + "LOSS_NAN_VAL = 1e7\n", + "\n", + "class SolverBasedFitness(CompoundOperator):\n", + "\n", + " key = 'SolverBasedFitness'\n", + " \n", + " def __init__(self, param_keys: list):\n", + " super().__init__(param_keys)\n", + " self.adapter = None\n", + "\n", + " def set_adapter(self, net = None):\n", + "\n", + " if self.adapter is None or net is not None:\n", + " compiling_params = {'tol':0.01, 'lambda_bound': 1e0, 'h': 1e-1} #\n", + " optimizer_params = {} # 'optimizer': 'LBFGS', 'params': {'lr': 1e-4}\n", + " training_params = {'epochs': 5e6, 'info_string_every' : 1e2}\n", + " early_stopping_params = {'patience': 6, 'no_improvement_patience' : 500}\n", + " self.adapter = SolverAdapter(net = deepcopy(net), use_cache = False)\n", + "\n", + " 
  {
   "cell_type": "code",
-  "execution_count": 11,
-  "id": "e9580909",
+  "execution_count": 43,
+  "id": "a84d1bde",
   "metadata": {},
-  "outputs": [
-   {
-    "data": {
-     "text/plain": [
-      "\"-3.951218144289313 * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999995807630981, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000378681952, dim: 0.0} + -0.9900591677672463 * d^2u/dx0^2{power: 1.0} + 1.4766602719611732 * t{power: 1.0, dim: 0.0} + 0.014961528346664596 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999995886960809, dim: 0.0}\\n{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004122473023559842}}\""
-     ]
-    },
-    "execution_count": 11,
-    "metadata": {},
-    "output_type": "execute_result"
-   }
-  ],
+  "outputs": [],
   "source": [
+   "from functools import partial\n",
+   "from copy import deepcopy\n",
+   "import torch\n",
+   "\n",
+   "# from epde.operators.common.fitness import SolverBasedFitness\n",
+   "from epde.operators.utils.operator_mappers import map_operator_between_levels, OperatorCondition\n",
+   "from epde.operators.utils.template import add_base_param_to_operator, CompoundOperator\n",
+   "\n",
+   "from epde.interface.solver_integration import SolverAdapter\n",
+   "\n",
+   "from epde.operators.common.sparsity import LASSOSparsity\n",
+   "from epde.operators.common.coeff_calculation import LinRegBasedCoeffsEquation\n",
+   "from epde.structure.main_structures import SoEq, Equation\n",
+   "\n",
+   "LOSS_NAN_VAL = 1e7\n",
+   "\n",
+   "class SolverBasedFitness(CompoundOperator):\n",
+   "\n",
+   "    key = 'SolverBasedFitness'\n",
+   "\n",
+   "    def __init__(self, param_keys: list):\n",
+   "        super().__init__(param_keys)\n",
+   "        self.adapter = None\n",
+   "\n",
+   "    def set_adapter(self, net = None):\n",
+   "        # Lazily build the solver adapter; a passed net overrides the cached one.\n",
+   "        if self.adapter is None or net is not None:\n",
+   "            compiling_params = {'tol': 0.01, 'lambda_bound': 1e0, 'h': 1e-1}\n",
+   "            optimizer_params = {}  # e.g. 'optimizer': 'LBFGS', 'params': {'lr': 1e-4}\n",
+   "            training_params = {'epochs': 5e6, 'info_string_every': 1e2}\n",
+   "            early_stopping_params = {'patience': 6, 'no_improvement_patience': 500}\n",
+   "            self.adapter = SolverAdapter(net = deepcopy(net), use_cache = False)\n",
+   "            self.adapter.set_compiling_params(**compiling_params)\n",
+   "            self.adapter.set_optimizer_params(**optimizer_params)\n",
+   "            self.adapter.set_early_stopping_params(**early_stopping_params)\n",
+   "            self.adapter.set_training_params(**training_params)\n",
+   "\n",
+   "    def apply(self, objective : SoEq, arguments : dict):\n",
+   "        self_args, subop_args = self.parse_suboperator_args(arguments = arguments)\n",
+   "\n",
+   "        try:\n",
+   "            net = deepcopy(global_var.solution_guess_nn)\n",
+   "        except (NameError, AttributeError):  # the guess network may be absent from epde.globals\n",
+   "            net = None\n",
+   "        print(f'Using net {net}')\n",
+   "        self.set_adapter(net)\n",
+   "\n",
+   "        # self.suboperators['sparsity'].apply(objective, subop_args['sparsity'])\n",
+   "        # self.suboperators['coeff_calc'].apply(objective, subop_args['coeff_calc'])\n",
+   "\n",
+   "        print('solving equation:')\n",
+   "        print(objective.text_form)\n",
+   "\n",
+   "        loss_add, solution = self.adapter.solve_epde_system(system = objective, grids = None,\n",
+   "                                                            boundary_conditions = None)\n",
+   "        print('Solved!')\n",
+   "        self.g_fun_vals = global_var.grid_cache.g_func\n",
+   "\n",
+   "        for eq_idx, eq in enumerate(objective.vals):\n",
+   "            if torch.isnan(loss_add):\n",
+   "                fitness_value = 2*LOSS_NAN_VAL\n",
+   "            else:\n",
+   "                referential_data = global_var.tensor_cache.get((eq.main_var_to_explain, (1.0,)))\n",
+   "\n",
+   "                plt.plot(solution[..., eq_idx], color = 'r')\n",
+   "                plt.plot(referential_data.reshape(solution[..., eq_idx].shape), color = 'b')\n",
+   "                plt.plot(global_var.solution_guess_nn(torch.from_numpy(t_train).float().reshape((-1, 1))).detach().numpy(),\n",
+   "                         '*', color = 'y')\n",
+   "                plt.show()\n",
+   "\n",
+   "                # Weighted L2 discrepancy between the solver field and the cached data.\n",
+   "                discr = (solution[..., eq_idx] - referential_data.reshape(solution[..., eq_idx].shape))\n",
+   "                discr = np.multiply(discr, self.g_fun_vals.reshape(discr.shape))\n",
+   "                rl_error = np.linalg.norm(discr, ord = 2)\n",
+   "\n",
+   "                print(f'fitness error is {rl_error}, while loss addition is {float(loss_add)}')\n",
+   "                fitness_value = rl_error + self.params['pinn_loss_mult'] * float(loss_add)\n",
+   "                if np.sum(eq.weights_final) == 0:\n",
+   "                    fitness_value /= self.params['penalty_coeff']\n",
+   "\n",
+   "            eq.fitness_calculated = True\n",
+   "            eq.fitness_value = fitness_value\n",
+   "\n",
+   "    def use_default_tags(self):\n",
+   "        self._tags = {'fitness evaluation', 'chromosome level', 'contains suboperators', 'inplace'}\n",
+   "\n",
+   "sparsity = LASSOSparsity()\n",
+   "coeff_calc = LinRegBasedCoeffsEquation()\n",
+   "\n",
+   "fitness = SolverBasedFitness(['penalty_coeff'])  # apply() also reads self.params['pinn_loss_mult']\n",
+   "\n",
+   "sparsity = map_operator_between_levels(sparsity, 'gene level', 'chromosome level')\n",
+   "coeff_calc = map_operator_between_levels(coeff_calc, 'gene level', 'chromosome level')\n",
+   "\n",
+   "add_kwarg_to_operator = partial(add_base_param_to_operator, target_dict = dict())\n",
+   "add_kwarg_to_operator(operator = fitness)\n",
+   "\n",
+   "fitness.set_suboperators({'sparsity' : sparsity, 'coeff_calc' : coeff_calc})"
   ]
  },
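The `apply` method above scores a candidate system by solving it with the adapter and adding the weighted L2 discrepancy against the cached field data to the solver's own residual loss. A toy recomputation of that score with stand-in arrays (the 1e-2 factor is an assumed value for the `pinn_loss_mult` parameter):

    import numpy as np

    solution = np.linspace(0., 1., 100)                         # stand-in for the solver's output field
    referential_data = solution + 0.01 * np.random.randn(100)   # stand-in for the cached data
    g_fun_vals = np.ones_like(solution)                         # domain weights, as grid_cache.g_func
    loss_add = 0.1                                              # stand-in for the solver's residual loss

    discr = np.multiply(solution - referential_data, g_fun_vals)
    rl_error = np.linalg.norm(discr, ord = 2)
    fitness_value = rl_error + 1e-2 * float(loss_add)           # 1e-2: assumed pinn_loss_mult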
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "17b7541f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "dict_keys([('du/dx0', (1.0,)), ('d^2u/dx0^2', (1.0,)), ('u', (1.0,)), ('t', (1.0, 0.0)), ('cos', (1.0, 2.0000002812049953, 0.0)), ('cos', (1.0, 2.000000429477152, 0.0)), (('d^2u/dx0^2', (1.0,)), ('du/dx0', (1.0,))), ('sin', (1.0, 1.9999995628251885, 0.0)), (('du/dx0', (1.0,)), ('u', (1.0,))), ('sin', (1.0, 2.0000001147055393, 0.0)), (('d^2u/dx0^2', (1.0,)), ('u', (1.0,))), <... several hundred further ('sin', (1.0, freq, 0.0)) and ('cos', (1.0, freq, 0.0)) cache keys omitted; all their frequencies agree with 2.0 to within ~5e-7 ...>
2.0000003691378514, 0.0)), ('cos', (1.0, 2.0000003885001014, 0.0)), ('sin', (1.0, 1.9999998055410615, 0.0)), ('sin', (1.0, 1.9999996928627282, 0.0)), ('cos', (1.0, 1.9999995870518985, 0.0)), ('sin', (1.0, 2.000000125747019, 0.0)), ('sin', (1.0, 2.0000001598231045, 0.0)), ('cos', (1.0, 2.0000002276058386, 0.0)), ('cos', (1.0, 1.9999998586032326, 0.0)), ('cos', (1.0, 1.9999997472785285, 0.0)), ('sin', (1.0, 2.000000436287843, 0.0)), ('cos', (1.0, 1.9999997948805956, 0.0)), ('cos', (1.0, 2.000000225946, 0.0)), ('sin', (1.0, 1.9999996323481335, 0.0)), ('cos', (1.0, 1.9999998572337432, 0.0)), ('sin', (1.0, 1.9999999684747074, 0.0)), ('sin', (1.0, 2.000000335280526, 0.0)), ('cos', (1.0, 2.0000003846603085, 0.0)), ('sin', (1.0, 1.9999998961303265, 0.0)), ('cos', (1.0, 2.000000089188771, 0.0)), ('sin', (1.0, 1.9999999761545975, 0.0)), ('sin', (1.0, 1.9999995195626514, 0.0)), ('sin', (1.0, 2.000000280187339, 0.0)), ('cos', (1.0, 1.9999996270388354, 0.0)), ('cos', (1.0, 1.9999996902975856, 0.0)), ('sin', (1.0, 1.9999996456213034, 0.0)), ('cos', (1.0, 2.000000195185636, 0.0)), ('cos', (1.0, 1.99999955392944, 0.0)), ('sin', (1.0, 1.9999997319799727, 0.0)), ('cos', (1.0, 2.0000001574463906, 0.0)), ('cos', (1.0, 1.9999999749884751, 0.0)), ('cos', (1.0, 2.000000150164566, 0.0)), ('sin', (1.0, 2.0000002327414177, 0.0)), ('cos', (1.0, 1.9999997853761875, 0.0)), ('sin', (1.0, 1.9999996510907547, 0.0)), ('sin', (1.0, 1.999999673712459, 0.0)), ('sin', (1.0, 1.9999997818393267, 0.0)), ('sin', (1.0, 2.000000360351173, 0.0)), ('sin', (1.0, 2.0000004599066226, 0.0)), ('cos', (1.0, 2.0000001012227324, 0.0)), ('cos', (1.0, 1.9999995087953886, 0.0)), ('sin', (1.0, 1.9999997646135175, 0.0)), ('cos', (1.0, 1.9999997460609968, 0.0)), ('sin', (1.0, 1.9999998899569988, 0.0)), ('sin', (1.0, 1.9999996841034693, 0.0)), ('sin', (1.0, 1.9999996634514399, 0.0)), ('cos', (1.0, 1.9999998944034603, 0.0)), ('cos', (1.0, 1.9999998565507668, 0.0)), ('sin', (1.0, 2.000000009072075, 0.0)), ('cos', (1.0, 1.9999999189831528, 0.0)), ('sin', (1.0, 1.9999999061749822, 0.0)), ('cos', (1.0, 1.9999996221053695, 0.0)), ('cos', (1.0, 2.0000001089925172, 0.0)), ('sin', (1.0, 1.9999996537758, 0.0)), ('cos', (1.0, 1.9999999769329226, 0.0)), ('sin', (1.0, 1.9999999997119937, 0.0)), ('sin', (1.0, 2.000000004922682, 0.0)), ('sin', (1.0, 2.0000000811851315, 0.0)), ('cos', (1.0, 2.000000229463978, 0.0)), ('sin', (1.0, 2.000000110909396, 0.0)), ('cos', (1.0, 2.000000336601729, 0.0)), ('cos', (1.0, 2.000000291726993, 0.0)), ('cos', (1.0, 2.000000225016616, 0.0)), ('sin', (1.0, 2.000000213506205, 0.0)), ('cos', (1.0, 2.0000003051160937, 0.0)), ('cos', (1.0, 2.000000327475627, 0.0)), ('sin', (1.0, 1.9999997129900242, 0.0)), ('cos', (1.0, 2.000000159272928, 0.0)), ('cos', (1.0, 1.9999997955325393, 0.0)), ('sin', (1.0, 2.0000000722907614, 0.0)), ('sin', (1.0, 1.9999998110135813, 0.0)), ('cos', (1.0, 2.0000004585168627, 0.0)), ('sin', (1.0, 1.9999995885766046, 0.0)), ('sin', (1.0, 1.9999997290716744, 0.0)), ('sin', (1.0, 2.000000450426071, 0.0)), ('cos', (1.0, 1.9999996413655754, 0.0)), ('cos', (1.0, 1.9999997231132547, 0.0)), ('sin', (1.0, 1.9999996498174648, 0.0)), ('cos', (1.0, 2.000000334811974, 0.0)), ('cos', (1.0, 2.000000374696958, 0.0)), ('sin', (1.0, 2.000000442692355, 0.0)), ('cos', (1.0, 2.000000366911271, 0.0)), ('cos', (1.0, 2.0000001226545554, 0.0)), ('cos', (1.0, 2.0000000751053846, 0.0)), ('sin', (1.0, 1.9999997439636497, 0.0)), ('sin', (1.0, 2.0000002119506917, 0.0)), ('sin', (1.0, 2.0000000833360554, 0.0)), ('cos', (1.0, 1.9999996433944218, 
0.0)), ('sin', (1.0, 2.000000205487784, 0.0)), ('cos', (1.0, 1.999999761095153, 0.0)), ('cos', (1.0, 2.000000259829726, 0.0)), ('cos', (1.0, 2.000000289107437, 0.0)), ('cos', (1.0, 1.9999996429929932, 0.0)), ('sin', (1.0, 1.999999720530371, 0.0)), ('cos', (1.0, 1.9999998740967204, 0.0)), ('cos', (1.0, 2.000000226917959, 0.0)), ('sin', (1.0, 1.9999997491460861, 0.0)), ('sin', (1.0, 1.999999804293088, 0.0)), ('cos', (1.0, 1.9999999899152414, 0.0)), ('cos', (1.0, 1.9999997934041553, 0.0)), ('cos', (1.0, 1.9999995220222586, 0.0)), ('sin', (1.0, 1.999999547132482, 0.0)), ('sin', (1.0, 2.00000019012804, 0.0)), ('sin', (1.0, 1.9999995051639305, 0.0)), ('cos', (1.0, 1.9999997148093942, 0.0)), ('cos', (1.0, 1.999999956675464, 0.0)), ('sin', (1.0, 2.0000001556976583, 0.0)), ('sin', (1.0, 1.9999999097658656, 0.0)), ('cos', (1.0, 1.9999997865345498, 0.0)), ('sin', (1.0, 1.9999996381323895, 0.0)), ('cos', (1.0, 2.000000085088374, 0.0)), ('sin', (1.0, 1.9999996518692138, 0.0)), ('cos', (1.0, 1.9999998830476613, 0.0)), ('sin', (1.0, 1.999999623008396, 0.0)), ('sin', (1.0, 1.999999571921549, 0.0)), ('sin', (1.0, 1.999999526611555, 0.0)), ('sin', (1.0, 1.9999996504838697, 0.0)), ('cos', (1.0, 1.9999997485997119, 0.0)), ('cos', (1.0, 2.0000004450577444, 0.0)), ('sin', (1.0, 2.000000260708347, 0.0)), ('sin', (1.0, 1.9999997920158512, 0.0)), ('sin', (1.0, 2.000000241784272, 0.0)), ('sin', (1.0, 1.9999998647197788, 0.0)), ('sin', (1.0, 2.000000409020233, 0.0)), ('sin', (1.0, 1.999999641836365, 0.0)), ('sin', (1.0, 2.000000359425856, 0.0)), ('cos', (1.0, 1.9999999315583992, 0.0)), ('cos', (1.0, 1.999999612449205, 0.0)), ('cos', (1.0, 2.000000175422958, 0.0)), ('cos', (1.0, 1.9999995899372556, 0.0)), ('cos', (1.0, 2.0000000271949623, 0.0)), ('cos', (1.0, 1.9999995505815804, 0.0)), ('cos', (1.0, 2.0000000634769735, 0.0)), ('sin', (1.0, 1.9999996594969047, 0.0)), ('sin', (1.0, 1.9999995841188067, 0.0)), ('sin', (1.0, 2.000000311230729, 0.0)), ('sin', (1.0, 2.000000229545444, 0.0)), ('sin', (1.0, 1.9999997312223112, 0.0)), ('cos', (1.0, 2.000000054186713, 0.0)), ('sin', (1.0, 1.9999998368685954, 0.0)), ('cos', (1.0, 2.0000001132890035, 0.0)), ('cos', (1.0, 1.9999999781878302, 0.0)), ('sin', (1.0, 2.00000003436191, 0.0)), ('cos', (1.0, 1.9999998013361153, 0.0)), ('sin', (1.0, 2.000000492511231, 0.0)), ('cos', (1.0, 2.0000003091462886, 0.0)), ('cos', (1.0, 2.000000396957866, 0.0)), ('cos', (1.0, 1.9999996584909012, 0.0)), ('sin', (1.0, 2.0000001567813537, 0.0)), ('cos', (1.0, 2.0000000837024396, 0.0)), ('sin', (1.0, 1.9999995119178082, 0.0)), ('cos', (1.0, 1.9999998096218312, 0.0)), ('cos', (1.0, 1.999999728508808, 0.0)), ('cos', (1.0, 2.0000000230916135, 0.0)), ('sin', (1.0, 1.9999999310936276, 0.0)), ('sin', (1.0, 1.9999996463775396, 0.0)), ('sin', (1.0, 1.9999996770213708, 0.0)), ('cos', (1.0, 2.000000272479907, 0.0)), ('cos', (1.0, 2.0000004760261056, 0.0)), ('cos', (1.0, 2.0000002600926097, 0.0)), ('sin', (1.0, 2.000000282710761, 0.0)), ('cos', (1.0, 1.9999999313084995, 0.0)), ('cos', (1.0, 1.9999995395909, 0.0)), ('cos', (1.0, 2.00000017074169, 0.0)), ('cos', (1.0, 1.9999995647457525, 0.0)), ('sin', (1.0, 1.9999998734218447, 0.0)), ('cos', (1.0, 2.0000000771694615, 0.0)), ('sin', (1.0, 2.0000001208867313, 0.0)), ('sin', (1.0, 1.9999998370665022, 0.0)), ('sin', (1.0, 1.9999996232105126, 0.0)), ('sin', (1.0, 2.000000181423641, 0.0)), ('cos', (1.0, 1.9999998173981062, 0.0)), ('cos', (1.0, 2.000000303290545, 0.0)), ('sin', (1.0, 1.9999997658554234, 0.0)), ('sin', (1.0, 2.0000004665543853, 0.0)), ('cos', (1.0, 
1.9999997142309853, 0.0)), ('sin', (1.0, 2.000000309882819, 0.0)), ('cos', (1.0, 2.0000001943211156, 0.0)), ('cos', (1.0, 1.999999958436385, 0.0)), ('cos', (1.0, 1.999999819241742, 0.0)), ('cos', (1.0, 1.9999995315727093, 0.0)), ('cos', (1.0, 2.0000000545128205, 0.0)), ('sin', (1.0, 1.9999999121532992, 0.0)), ('cos', (1.0, 1.9999995021209884, 0.0)), ('sin', (1.0, 2.0000003684555483, 0.0)), ('sin', (1.0, 2.0000001666661134, 0.0)), ('sin', (1.0, 1.9999999955811094, 0.0)), ('sin', (1.0, 2.0000002940736468, 0.0)), ('sin', (1.0, 2.000000185868352, 0.0)), ('cos', (1.0, 2.000000441761099, 0.0)), ('cos', (1.0, 2.0000003140035165, 0.0)), ('sin', (1.0, 1.9999999092950942, 0.0)), ('sin', (1.0, 1.9999997124763087, 0.0)), ('cos', (1.0, 2.000000284119805, 0.0)), ('cos', (1.0, 1.999999722125523, 0.0)), ('sin', (1.0, 1.9999997980959419, 0.0)), ('cos', (1.0, 2.0000000365431987, 0.0)), ('sin', (1.0, 1.9999996788789294, 0.0)), ('cos', (1.0, 1.9999995889142013, 0.0)), ('sin', (1.0, 1.9999996694109567, 0.0)), ('cos', (1.0, 2.000000294430161, 0.0)), ('sin', (1.0, 1.9999998154805918, 0.0)), ('sin', (1.0, 1.9999999515302709, 0.0)), ('sin', (1.0, 1.9999999631131122, 0.0)), ('sin', (1.0, 2.0000002633334404, 0.0)), ('sin', (1.0, 1.9999998055654995, 0.0)), ('cos', (1.0, 2.0000002956301905, 0.0)), ('sin', (1.0, 1.9999997726966934, 0.0)), ('sin', (1.0, 1.9999999260870904, 0.0)), ('cos', (1.0, 1.9999996757417482, 0.0)), ('sin', (1.0, 2.0000003484830273, 0.0)), ('sin', (1.0, 1.9999998679685511, 0.0)), ('cos', (1.0, 1.9999996512380003, 0.0)), ('sin', (1.0, 2.0000000365774975, 0.0)), ('cos', (1.0, 2.000000428883214, 0.0)), ('cos', (1.0, 2.0000000366602633, 0.0)), ('sin', (1.0, 2.0000004234906346, 0.0)), ('sin', (1.0, 1.9999995897502512, 0.0)), ('sin', (1.0, 2.00000021648955, 0.0)), ('cos', (1.0, 2.0000000837639065, 0.0)), ('cos', (1.0, 2.0000000490925816, 0.0)), ('cos', (1.0, 1.9999998811236714, 0.0)), ('sin', (1.0, 1.999999573566712, 0.0)), ('sin', (1.0, 2.00000004753754, 0.0)), ('cos', (1.0, 2.0000001912659684, 0.0)), ('cos', (1.0, 1.9999996664458555, 0.0)), ('sin', (1.0, 1.99999977665348, 0.0)), ('sin', (1.0, 1.9999999069006016, 0.0)), ('sin', (1.0, 2.0000001603766235, 0.0)), ('cos', (1.0, 2.000000301399726, 0.0)), ('sin', (1.0, 1.9999998310412284, 0.0)), ('cos', (1.0, 2.0000000744679687, 0.0)), ('cos', (1.0, 2.000000081958479, 0.0)), ('cos', (1.0, 2.0000001187426726, 0.0)), ('cos', (1.0, 2.0000004646743856, 0.0)), ('sin', (1.0, 1.999999534749508, 0.0)), ('sin', (1.0, 1.9999999056133482, 0.0)), ('sin', (1.0, 1.999999901406077, 0.0)), ('cos', (1.0, 2.0000001072389457, 0.0)), ('cos', (1.0, 1.9999999236571453, 0.0)), ('cos', (1.0, 1.9999998591414958, 0.0)), ('cos', (1.0, 2.000000237473685, 0.0)), ('cos', (1.0, 1.999999519473045, 0.0)), ('sin', (1.0, 2.0000003252332497, 0.0)), ('cos', (1.0, 2.000000276502045, 0.0)), ('cos', (1.0, 2.0000003647388866, 0.0)), ('sin', (1.0, 2.000000245732944, 0.0)), ('cos', (1.0, 2.0000003741813828, 0.0)), ('sin', (1.0, 2.0000001553219526, 0.0)), ('sin', (1.0, 2.0000004197107266, 0.0)), ('sin', (1.0, 1.9999995633806291, 0.0)), ('cos', (1.0, 1.9999996380593152, 0.0)), ('sin', (1.0, 2.0000001255997497, 0.0)), ('sin', (1.0, 2.0000001491431787, 0.0)), ('cos', (1.0, 1.999999827134408, 0.0)), ('sin', (1.0, 2.000000011878551, 0.0)), ('cos', (1.0, 2.0000002448192773, 0.0)), ('sin', (1.0, 2.000000139985737, 0.0)), ('sin', (1.0, 2.00000045979663, 0.0)), ('cos', (1.0, 1.9999997765840374, 0.0)), ('sin', (1.0, 2.000000250246216, 0.0)), ('cos', (1.0, 1.9999997922522572, 0.0)), ('cos', (1.0, 
2.000000001311358, 0.0)), ('sin', (1.0, 2.00000005355028, 0.0)), ('sin', (1.0, 1.999999919079659, 0.0)), ('cos', (1.0, 1.9999998008839206, 0.0)), ('cos', (1.0, 2.0000001968934096, 0.0)), ('sin', (1.0, 1.9999997502272726, 0.0)), ('sin', (1.0, 1.999999865135056, 0.0)), ('sin', (1.0, 2.000000288746656, 0.0)), ('cos', (1.0, 2.000000413429406, 0.0)), ('cos', (1.0, 2.000000461824921, 0.0)), ('cos', (1.0, 1.9999998246686017, 0.0)), ('sin', (1.0, 1.999999837678458, 0.0)), ('cos', (1.0, 2.0000003376371835, 0.0)), ('cos', (1.0, 2.000000439884387, 0.0)), ('cos', (1.0, 2.000000373042078, 0.0)), ('cos', (1.0, 2.0000003785707574, 0.0)), ('cos', (1.0, 1.999999503262378, 0.0)), ('sin', (1.0, 2.000000318099722, 0.0)), ('sin', (1.0, 2.00000004278104, 0.0)), ('cos', (1.0, 2.000000159170461, 0.0)), ('cos', (1.0, 1.9999999064782974, 0.0)), ('sin', (1.0, 1.9999995227617606, 0.0)), ('sin', (1.0, 2.000000013769879, 0.0)), ('sin', (1.0, 1.9999995179627725, 0.0)), ('sin', (1.0, 1.9999996497733932, 0.0)), ('cos', (1.0, 2.0000003307649554, 0.0)), ('sin', (1.0, 2.000000010829211, 0.0)), ('cos', (1.0, 2.0000001244428667, 0.0)), ('sin', (1.0, 2.0000000810738783, 0.0)), ('cos', (1.0, 2.00000028922445, 0.0)), ('sin', (1.0, 1.9999998920787752, 0.0)), ('cos', (1.0, 2.000000159002626, 0.0)), ('sin', (1.0, 2.0000002458431734, 0.0)), ('cos', (1.0, 1.9999996187526097, 0.0)), ('sin', (1.0, 1.999999999967083, 0.0)), ('sin', (1.0, 1.9999996198094567, 0.0)), ('cos', (1.0, 2.0000004249960233, 0.0)), ('cos', (1.0, 1.9999997244273588, 0.0)), ('cos', (1.0, 1.9999995048329036, 0.0)), ('cos', (1.0, 2.0000002109459314, 0.0)), ('cos', (1.0, 1.9999995063965152, 0.0)), ('sin', (1.0, 2.0000002530013177, 0.0)), ('sin', (1.0, 1.9999997262248428, 0.0)), ('sin', (1.0, 2.0000003824699117, 0.0)), ('cos', (1.0, 2.000000468389995, 0.0)), ('cos', (1.0, 2.000000121358765, 0.0)), ('cos', (1.0, 2.000000413862939, 0.0)), ('cos', (1.0, 1.9999998902724108, 0.0)), ('sin', (1.0, 2.0000001599558335, 0.0)), ('cos', (1.0, 2.000000265366409, 0.0)), ('cos', (1.0, 2.0000004647993896, 0.0)), ('cos', (1.0, 2.0000004389569166, 0.0)), ('sin', (1.0, 2.0000004663983786, 0.0)), ('cos', (1.0, 2.0000004440619414, 0.0)), ('sin', (1.0, 1.9999998595969606, 0.0)), ('sin', (1.0, 1.9999996812219631, 0.0)), ('cos', (1.0, 1.9999995494565384, 0.0)), ('sin', (1.0, 1.9999995560691723, 0.0)), ('cos', (1.0, 2.0000002723049484, 0.0)), ('cos', (1.0, 1.9999998464153848, 0.0)), ('sin', (1.0, 2.000000079728449, 0.0)), ('sin', (1.0, 2.000000191157122, 0.0)), ('sin', (1.0, 2.0000003665850756, 0.0)), ('cos', (1.0, 1.9999996585943272, 0.0)), ('cos', (1.0, 2.0000000720364026, 0.0)), ('cos', (1.0, 2.000000309465722, 0.0)), ('sin', (1.0, 1.9999996910246791, 0.0)), ('sin', (1.0, 1.9999999938673496, 0.0)), ('cos', (1.0, 2.0000001011868247, 0.0)), ('cos', (1.0, 2.000000132489927, 0.0)), ('sin', (1.0, 1.9999997703021004, 0.0)), ('cos', (1.0, 2.000000444462692, 0.0)), ('cos', (1.0, 1.9999998824814482, 0.0)), ('cos', (1.0, 2.000000211743965, 0.0)), ('cos', (1.0, 2.0000002794439986, 0.0)), ('cos', (1.0, 2.000000228800431, 0.0)), ('sin', (1.0, 2.0000001425569405, 0.0)), ('sin', (1.0, 1.9999998811919482, 0.0)), ('cos', (1.0, 2.000000272770588, 0.0)), ('cos', (1.0, 2.000000085676754, 0.0)), ('sin', (1.0, 1.9999998352470465, 0.0)), ('sin', (1.0, 2.000000150368664, 0.0)), ('sin', (1.0, 2.000000012531005, 0.0)), ('cos', (1.0, 1.999999814874923, 0.0)), ('sin', (1.0, 1.999999909342362, 0.0)), ('cos', (1.0, 2.0000000732761003, 0.0)), ('sin', (1.0, 2.0000004838329075, 0.0)), ('sin', (1.0, 2.0000003422460724, 0.0)), 
('cos', (1.0, 2.0000000712730155, 0.0)), ('cos', (1.0, 1.9999997259964053, 0.0)), ('cos', (1.0, 2.0000003793496477, 0.0)), ('sin', (1.0, 2.000000413755044, 0.0)), ('cos', (1.0, 2.0000002804471553, 0.0)), ('cos', (1.0, 2.0000002532508514, 0.0)), ('cos', (1.0, 1.999999536656441, 0.0)), ('sin', (1.0, 1.99999996668002, 0.0)), ('sin', (1.0, 2.00000013086073, 0.0)), ('cos', (1.0, 1.9999996982671266, 0.0)), ('cos', (1.0, 1.999999588233956, 0.0)), ('sin', (1.0, 2.0000003418777617, 0.0)), ('cos', (1.0, 1.99999979855113, 0.0)), ('sin', (1.0, 1.9999996318424849, 0.0)), ('cos', (1.0, 1.9999996952266386, 0.0)), ('sin', (1.0, 1.9999997360681752, 0.0)), ('cos', (1.0, 1.9999995727926072, 0.0)), ('cos', (1.0, 1.9999999274344036, 0.0)), ('cos', (1.0, 1.9999997836616927, 0.0)), ('sin', (1.0, 2.000000247554315, 0.0)), ('cos', (1.0, 2.0000004718772724, 0.0)), ('cos', (1.0, 1.9999996240164348, 0.0)), ('sin', (1.0, 2.000000490716448, 0.0)), ('cos', (1.0, 1.999999713039374, 0.0)), ('cos', (1.0, 1.9999995270270345, 0.0)), ('sin', (1.0, 2.000000116221128, 0.0)), ('cos', (1.0, 2.0000003045671035, 0.0)), ('cos', (1.0, 2.000000153957805, 0.0)), ('cos', (1.0, 1.999999627329398, 0.0)), ('cos', (1.0, 1.9999997872053792, 0.0)), ('cos', (1.0, 2.0000004876048783, 0.0)), ('cos', (1.0, 2.0000003957524735, 0.0)), ('sin', (1.0, 1.9999999893498241, 0.0)), ('sin', (1.0, 2.000000461909886, 0.0)), ('sin', (1.0, 2.0000004646528247, 0.0)), ('cos', (1.0, 1.999999818832262, 0.0)), ('cos', (1.0, 1.999999824880807, 0.0)), ('cos', (1.0, 1.9999998506911256, 0.0)), ('cos', (1.0, 1.9999996362883574, 0.0)), ('cos', (1.0, 1.9999999142056777, 0.0)), ('cos', (1.0, 2.0000003528831924, 0.0)), ('sin', (1.0, 2.00000037302265, 0.0)), ('cos', (1.0, 1.9999997645926664, 0.0)), ('sin', (1.0, 2.000000270883186, 0.0)), ('cos', (1.0, 2.000000297442369, 0.0)), ('sin', (1.0, 2.0000004149064643, 0.0)), ('sin', (1.0, 2.0000001365322677, 0.0)), ('sin', (1.0, 1.9999996987294237, 0.0)), ('sin', (1.0, 2.0000003703625304, 0.0)), ('cos', (1.0, 1.999999728703328, 0.0)), ('sin', (1.0, 1.9999998381989812, 0.0)), ('sin', (1.0, 1.999999746430401, 0.0)), ('sin', (1.0, 1.9999995862206474, 0.0)), ('sin', (1.0, 2.000000007251827, 0.0)), ('sin', (1.0, 2.000000231276292, 0.0)), ('cos', (1.0, 2.0000003944201366, 0.0)), ('cos', (1.0, 1.9999999445977332, 0.0)), ('sin', (1.0, 1.9999998966540948, 0.0)), ('cos', (1.0, 2.000000252191726, 0.0)), ('sin', (1.0, 2.00000013292221, 0.0)), ('sin', (1.0, 2.0000002506784806, 0.0)), ('cos', (1.0, 1.9999998841867177, 0.0)), ('sin', (1.0, 2.0000004955502417, 0.0)), ('sin', (1.0, 2.000000113631264, 0.0)), ('cos', (1.0, 1.9999999907944428, 0.0)), ('sin', (1.0, 1.9999997676354988, 0.0)), ('cos', (1.0, 2.0000001255770483, 0.0)), ('sin', (1.0, 1.9999996515822935, 0.0)), ('cos', (1.0, 2.00000029318698, 0.0)), ('sin', (1.0, 1.9999997511477436, 0.0)), ('sin', (1.0, 1.999999840599821, 0.0)), ('cos', (1.0, 2.0000004306965073, 0.0)), ('sin', (1.0, 1.9999996297927676, 0.0)), ('sin', (1.0, 2.0000004917164675, 0.0)), ('sin', (1.0, 1.9999998190208237, 0.0)), ('cos', (1.0, 2.000000209575532, 0.0)), ('sin', (1.0, 1.9999996418305248, 0.0)), ('sin', (1.0, 2.000000375254934, 0.0)), ('cos', (1.0, 2.0000001241989507, 0.0)), ('cos', (1.0, 1.999999613943989, 0.0)), ('cos', (1.0, 1.9999995503751802, 0.0)), ('cos', (1.0, 2.0000003375966036, 0.0)), ('sin', (1.0, 2.0000002900100347, 0.0)), ('cos', (1.0, 2.000000197790045, 0.0)), ('cos', (1.0, 1.999999750215568, 0.0)), ('cos', (1.0, 2.000000003027364, 0.0)), ('sin', (1.0, 2.0000000946633985, 0.0)), ('cos', (1.0, 
1.99999983164031, 0.0)), ('sin', (1.0, 2.0000001862313788, 0.0)), ('sin', (1.0, 2.0000003621306504, 0.0)), ('cos', (1.0, 1.9999997149004944, 0.0)), ('sin', (1.0, 1.9999999245217908, 0.0)), ('sin', (1.0, 1.99999992596207, 0.0)), ('cos', (1.0, 1.9999995399031691, 0.0)), ('sin', (1.0, 2.0000002951208224, 0.0)), ('cos', (1.0, 1.999999691150572, 0.0)), ('cos', (1.0, 1.9999998026501884, 0.0)), ('cos', (1.0, 2.0000000303668175, 0.0)), ('cos', (1.0, 1.9999997602635982, 0.0)), ('sin', (1.0, 1.9999995069939434, 0.0)), ('cos', (1.0, 2.0000002412427595, 0.0)), ('cos', (1.0, 1.9999996785214287, 0.0)), ('cos', (1.0, 1.9999996386828747, 0.0)), ('cos', (1.0, 1.9999998140397206, 0.0)), ('sin', (1.0, 2.000000459603227, 0.0)), ('cos', (1.0, 2.0000004755735814, 0.0)), ('sin', (1.0, 2.0000000206680366, 0.0)), ('cos', (1.0, 1.9999997630994246, 0.0)), ('cos', (1.0, 1.9999998013305134, 0.0)), ('sin', (1.0, 2.0000001835746963, 0.0)), ('cos', (1.0, 1.9999998148898643, 0.0)), ('sin', (1.0, 2.000000343729033, 0.0)), ('cos', (1.0, 1.9999995066916554, 0.0)), ('cos', (1.0, 2.0000002274222246, 0.0)), ('sin', (1.0, 1.999999681303394, 0.0)), ('sin', (1.0, 2.0000000378873946, 0.0)), ('sin', (1.0, 2.000000046894695, 0.0)), ('cos', (1.0, 1.9999998892343178, 0.0)), ('sin', (1.0, 1.9999997057642518, 0.0)), ('cos', (1.0, 1.999999989737769, 0.0)), ('sin', (1.0, 1.9999996217565024, 0.0)), ('cos', (1.0, 1.9999998152192469, 0.0)), ('sin', (1.0, 1.9999995862063014, 0.0)), ('sin', (1.0, 2.0000003499646835, 0.0)), ('cos', (1.0, 1.9999998921228048, 0.0)), ('cos', (1.0, 1.9999995037834433, 0.0)), ('cos', (1.0, 2.0000001952634583, 0.0)), ('sin', (1.0, 1.9999995520123843, 0.0)), ('sin', (1.0, 2.0000004367193647, 0.0)), ('sin', (1.0, 2.000000068013569, 0.0)), ('cos', (1.0, 2.0000001554462177, 0.0)), ('cos', (1.0, 1.9999999763660385, 0.0)), ('sin', (1.0, 1.999999756949766, 0.0)), ('sin', (1.0, 2.0000002837406394, 0.0)), ('cos', (1.0, 1.9999996225781385, 0.0)), ('cos', (1.0, 2.00000000708611, 0.0)), ('sin', (1.0, 2.0000004107094407, 0.0)), ('sin', (1.0, 1.9999998953490918, 0.0)), ('sin', (1.0, 1.9999996415621084, 0.0)), ('cos', (1.0, 2.0000001416113404, 0.0)), ('sin', (1.0, 2.000000498587677, 0.0)), ('cos', (1.0, 2.000000366532789, 0.0)), ('sin', (1.0, 1.9999996295541116, 0.0)), ('cos', (1.0, 2.0000000972604703, 0.0)), ('cos', (1.0, 2.0000000121329915, 0.0)), ('sin', (1.0, 1.9999996016424157, 0.0)), ('sin', (1.0, 1.9999997121965245, 0.0)), ('sin', (1.0, 1.9999995456660453, 0.0)), ('cos', (1.0, 2.000000112234579, 0.0)), ('cos', (1.0, 2.0000002216502724, 0.0)), ('cos', (1.0, 1.999999573991569, 0.0)), ('sin', (1.0, 1.9999998926906541, 0.0)), ('sin', (1.0, 1.9999998691385286, 0.0)), ('sin', (1.0, 2.00000000695277, 0.0)), ('sin', (1.0, 1.9999997326768295, 0.0)), ('sin', (1.0, 1.9999996297872602, 0.0)), ('cos', (1.0, 2.0000003864940683, 0.0)), ('sin', (1.0, 2.0000001286589217, 0.0)), ('sin', (1.0, 2.0000001612604508, 0.0)), ('sin', (1.0, 2.0000002987635725, 0.0)), ('sin', (1.0, 1.9999996380652254, 0.0)), ('cos', (1.0, 1.9999996710826038, 0.0)), ('sin', (1.0, 2.0000003637881143, 0.0)), ('sin', (1.0, 2.000000093595265, 0.0)), ('cos', (1.0, 2.0000003046746038, 0.0)), ('sin', (1.0, 1.9999999455033424, 0.0)), ('sin', (1.0, 1.9999998638702892, 0.0)), ('cos', (1.0, 1.9999999001201783, 0.0)), ('sin', (1.0, 1.9999999578315304, 0.0)), ('cos', (1.0, 2.0000000146178603, 0.0)), ('cos', (1.0, 1.999999854132207, 0.0)), ('cos', (1.0, 1.999999800765063, 0.0)), ('sin', (1.0, 2.0000002139223994, 0.0)), ('cos', (1.0, 1.9999999564928872, 0.0)), ('sin', (1.0, 
2.0000000962291287, 0.0)), ('cos', (1.0, 1.9999997749380851, 0.0)), ('cos', (1.0, 2.0000002328067694, 0.0)), ('cos', (1.0, 2.0000001239208114, 0.0)), ('sin', (1.0, 2.0000001807259515, 0.0)), ('cos', (1.0, 2.0000002789508775, 0.0)), ('sin', (1.0, 1.9999995078041304, 0.0)), ('sin', (1.0, 1.9999999852165413, 0.0)), ('sin', (1.0, 1.9999998592938044, 0.0)), ('cos', (1.0, 2.0000002606480987, 0.0)), ('sin', (1.0, 2.0000002984673495, 0.0)), ('sin', (1.0, 2.000000221358679, 0.0)), ('sin', (1.0, 2.0000003143831275, 0.0)), ('cos', (1.0, 1.9999997076168223, 0.0)), ('sin', (1.0, 1.9999995102175867, 0.0)), ('sin', (1.0, 1.9999996846827057, 0.0)), ('cos', (1.0, 2.000000471415973, 0.0)), ('sin', (1.0, 1.99999995844176, 0.0)), ('sin', (1.0, 2.0000003690553467, 0.0)), ('sin', (1.0, 1.999999662288344, 0.0)), ('sin', (1.0, 2.0000000128365416, 0.0)), ('cos', (1.0, 2.0000003524788887, 0.0)), ('cos', (1.0, 2.0000002293749497, 0.0)), ('cos', (1.0, 2.000000322585637, 0.0)), ('sin', (1.0, 1.9999998554678344, 0.0)), ('cos', (1.0, 1.999999722122715, 0.0)), ('cos', (1.0, 1.9999996929917407, 0.0)), ('cos', (1.0, 2.000000283310476, 0.0)), ('cos', (1.0, 1.9999999218756177, 0.0)), ('sin', (1.0, 2.000000264402326, 0.0)), ('sin', (1.0, 2.00000047941989, 0.0)), ('cos', (1.0, 1.9999995251120781, 0.0)), ('cos', (1.0, 1.9999999956680292, 0.0)), ('cos', (1.0, 2.000000176598077, 0.0)), ('cos', (1.0, 1.9999996276456529, 0.0)), ('sin', (1.0, 1.9999997027196392, 0.0)), ('sin', (1.0, 1.9999995146875493, 0.0)), ('sin', (1.0, 2.0000000558485955, 0.0)), ('sin', (1.0, 2.0000000709519905, 0.0)), ('cos', (1.0, 2.0000002226946543, 0.0)), ('cos', (1.0, 2.000000492292392, 0.0)), ('cos', (1.0, 1.9999996731030083, 0.0)), ('cos', (1.0, 2.000000447309612, 0.0)), ('cos', (1.0, 2.0000004360943775, 0.0)), ('sin', (1.0, 1.9999996621080127, 0.0)), ('cos', (1.0, 2.0000000860498965, 0.0)), ('sin', (1.0, 1.9999995097407595, 0.0)), ('sin', (1.0, 1.9999996995044222, 0.0)), ('cos', (1.0, 1.9999997771125193, 0.0)), ('sin', (1.0, 2.00000021029978, 0.0)), ('cos', (1.0, 1.9999999444941923, 0.0)), ('cos', (1.0, 1.9999997876829263, 0.0)), ('sin', (1.0, 2.000000474119551, 0.0)), ('sin', (1.0, 2.000000348607097, 0.0)), ('sin', (1.0, 2.000000184289638, 0.0)), ('cos', (1.0, 2.000000244751524, 0.0)), ('sin', (1.0, 1.9999996238288782, 0.0)), ('cos', (1.0, 2.0000001927744746, 0.0)), ('sin', (1.0, 1.999999719589327, 0.0)), ('cos', (1.0, 1.9999998899477656, 0.0)), ('cos', (1.0, 1.9999996004560978, 0.0)), ('sin', (1.0, 2.0000000564322624, 0.0)), ('sin', (1.0, 2.0000000204516275, 0.0)), ('sin', (1.0, 2.0000003258197125, 0.0)), ('sin', (1.0, 2.0000001829575975, 0.0)), ('sin', (1.0, 1.9999996698240912, 0.0)), ('sin', (1.0, 2.0000000349259808, 0.0)), ('sin', (1.0, 1.9999998030293524, 0.0)), ('cos', (1.0, 2.0000000730847285, 0.0)), ('cos', (1.0, 2.000000482900844, 0.0)), ('cos', (1.0, 2.000000381757073, 0.0)), ('cos', (1.0, 2.000000054344731, 0.0)), ('cos', (1.0, 2.0000001923442543, 0.0)), ('cos', (1.0, 1.9999996607563055, 0.0)), ('sin', (1.0, 2.0000004193106697, 0.0)), ('cos', (1.0, 1.9999997525683972, 0.0)), ('sin', (1.0, 2.0000003861429194, 0.0)), ('sin', (1.0, 2.0000000636755875, 0.0)), ('cos', (1.0, 1.9999998320842294, 0.0)), ('sin', (1.0, 1.999999987462813, 0.0)), ('cos', (1.0, 1.9999995504764927, 0.0)), ('cos', (1.0, 1.9999997789935953, 0.0)), ('sin', (1.0, 1.9999999974886187, 0.0)), ('cos', (1.0, 1.999999561438968, 0.0)), ('cos', (1.0, 1.999999628778907, 0.0)), ('cos', (1.0, 1.999999863477483, 0.0)), ('cos', (1.0, 2.00000017579911, 0.0)), ('sin', (1.0, 
1.9999995480322224, 0.0)), ('cos', (1.0, 1.9999998111297081, 0.0)), ('sin', (1.0, 2.00000023886952, 0.0)), ('cos', (1.0, 1.9999998902276914, 0.0)), ('cos', (1.0, 2.000000189421849, 0.0)), ('cos', (1.0, 1.9999996720844828, 0.0)), ('cos', (1.0, 1.9999998788744695, 0.0)), ('cos', (1.0, 1.9999999241015294, 0.0)), ('sin', (1.0, 1.999999831924017, 0.0)), ('sin', (1.0, 1.9999999080040178, 0.0)), ('cos', (1.0, 2.0000000401896783, 0.0)), ('cos', (1.0, 2.000000457907605, 0.0)), ('sin', (1.0, 2.0000002198827125, 0.0)), ('cos', (1.0, 2.000000427431843, 0.0)), ('cos', (1.0, 1.9999997690514402, 0.0)), ('cos', (1.0, 2.0000000855048548, 0.0)), ('sin', (1.0, 2.0000003046855372, 0.0)), ('cos', (1.0, 1.999999559311791, 0.0)), ('cos', (1.0, 1.9999995676186413, 0.0)), ('sin', (1.0, 1.999999858454064, 0.0)), ('sin', (1.0, 2.0000004583565816, 0.0)), ('sin', (1.0, 1.9999995134144464, 0.0)), ('sin', (1.0, 2.0000003156433497, 0.0)), ('cos', (1.0, 1.9999995169012559, 0.0)), ('cos', (1.0, 2.000000438264403, 0.0)), ('sin', (1.0, 1.9999998804449564, 0.0)), ('cos', (1.0, 2.000000092508785, 0.0)), ('cos', (1.0, 1.999999970489743, 0.0)), ('cos', (1.0, 1.9999996634643915, 0.0)), ('sin', (1.0, 2.0000001931858837, 0.0)), ('cos', (1.0, 2.0000001813352086, 0.0)), ('sin', (1.0, 2.000000486700722, 0.0)), ('cos', (1.0, 1.999999830514962, 0.0)), ('cos', (1.0, 2.0000002730537463, 0.0)), ('cos', (1.0, 1.9999998553862381, 0.0)), ('sin', (1.0, 2.0000003254728047, 0.0)), ('sin', (1.0, 1.9999999097596113, 0.0)), ('sin', (1.0, 2.0000003401925706, 0.0)), ('cos', (1.0, 2.0000000216346754, 0.0)), ('sin', (1.0, 2.0000000758293255, 0.0)), ('cos', (1.0, 2.000000291449805, 0.0)), ('sin', (1.0, 2.0000002359042877, 0.0)), ('sin', (1.0, 2.000000277575609, 0.0)), ('sin', (1.0, 2.00000005940148, 0.0)), ('sin', (1.0, 1.9999996473925432, 0.0)), ('sin', (1.0, 1.999999993591153, 0.0)), ('cos', (1.0, 1.9999995912528223, 0.0)), ('sin', (1.0, 1.9999996774221345, 0.0)), ('cos', (1.0, 1.9999995363912393, 0.0)), ('sin', (1.0, 1.9999998820438527, 0.0)), ('sin', (1.0, 1.9999999163680686, 0.0)), ('cos', (1.0, 2.000000014140193, 0.0)), ('sin', (1.0, 2.000000186376365, 0.0)), ('cos', (1.0, 2.0000001843395103, 0.0)), ('sin', (1.0, 1.9999997770110653, 0.0)), ('cos', (1.0, 2.00000020260657, 0.0))])" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "global_var.tensor_cache.memory_default['numpy'].keys()" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "73e8f424", + "metadata": {}, + "outputs": [], + "source": [ + "nn = deepcopy(global_var.solution_guess_nn)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "1dfb4177", + "metadata": {}, + "outputs": [], + "source": [ + "import pickle\n", + "with open(r\"/home/maslyaev/Documents/EPDE/examples/saved_objs/well_enough_ann.pickle\", \"wb\") as output_file:\n", + " pickle.dump(nn, output_file)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "547b7391", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "6104fe69", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiIAAAGeCAYAAACpVGq5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAACE+0lEQVR4nO29e3wU9b3//9pkyXJNwi0ESRCCykXkCEm0euq1gNpa21NBbCMV9RTEKj1fzylI2h6/hR4xnPb0tNVvAVvBJCgCvSjqrwoW6w3C7gZF8Z6QEEXlmgQlCZd8fn+888nObmZ3Z3fn8pnN+/l47GNmZ2Z3PrP7mc+8P++rRwghwDAMwzAM4wAZTjeAYRiGYZjeCwsiDMMwDMM4BgsiDMMwDMM4BgsiDMMwDMM4BgsiDMMwDMM4BgsiDMMwDMM4BgsiDMMwDMM4BgsiDMMwDMM4htfpBsSjs7MTBw4cwKBBg+DxeJxuDsMwDMMwBhBC4Pjx4zjrrLOQkRFd76G8IHLgwAEUFhY63QyGYRiGYZKgqakJBQUFUfcrL4gMGjQIAF1Idna2w61hGIZhGMYIra2tKCws7H6OR0N5QUSaY7Kzs1kQYRiGYRiXEc+tgp1VGYZhGIZxDBZEGIZhGIZxDBZEGIZhGIZxDBZEGIZhGIZxDBZEGIZhGIZxDBZEGIZhGIZxDBZEGIZhGIZxDBZEGIZhGIZxDBZEGIZhGIZxDBZEGIZhGIZxDBZEGIZhXEwgAFx9NVBZSctAwOkWMUxisCDCMAzjMqTwEQiQALJ9O/A//0PLqqrw/QyjOsoXvWMYhmHCeeghEjqWLQNefZW27dlDy6oqYN8+2v/ww8Datc61k2GM4BFCCKcbEYvW1lbk5OSgpaWFq+8yDNNraWwEDh8GPB6guNj454JBQAhg2DDg7LOtax/DRGL0+c0aEYZhGBcwZkxyn9MKLWpPO5neCvuIMAzDuICZM539PMNYBWtEGIZhFEVrjtm9O7XveuMNoLaWzTSMerAgwjAMoyhGzTEDBpCA4fEAX36pf8yhQ2ymYdSETTMMwzCKsmwZCRd6ZGSQoDJiBPDmm8Dx47TMz6ftGRGjuxQ8vF6gutrKVjNMYrBGhGEYRlEOHYquufD7galTgZMnAZ+Pto0bBzQ0AFlZZMrRi66pqQGmTbOsyQyTMKwRYRiGUYjGRgq5ra3V11xoNSQeT0gIkfh80bUoALBgASc6Y9SCNSIMwzAKEc8vRAgyv+TlxT4uL4+OGzkSeP994MQJ0pQEApT0rKTEtCYzTEqwRoRhGEYhli2Lvs/rJSGioQEoKIj9PQUFdNyf/wzcfjttO32alhs2kMYlGCQNDMM4CQsiDMMwCnHoUPR9NTXALbf0NMdEw+cDxo6llPAA0NkZOkdxMWlFkk2UpjqBQABXX301AoFA2DqjHiyIMAzDOIzWL2T9+p77Y/l8xKO6mjQpWtItgkZP6PjlL3+J7du3o6qqCpWVld3rLJSoB/uIMAzDOIxZfiF6lJUBEyemdwSNFDQefvhhtLW1Yfv27fB1qY2qqqq6j1u/fj0+//zz7mN/+MMfYvHixVi5ciVK2GnGMVgjwjAM4zCx8oUk4hfSm9iyZQtKS0tRXV2Nxx9/HACwbt06PPnkkwCAjo4OAMCxY8dw7NgxAMCRI0e6969bt65ba/Lwww87cAWMhAURhmEYh4mVLyRRvxA9ZATN5MmhbcOHJ6dhUYUbbrgBgUAAc+fOxZEjR5L6Dq1QUl1djdLSUmzZssXMZjIGsF0QmTFjht2nZBiGUQ6tX0jX8zCMVPxCIpERNHv2AJdeSttuvBH4/vfdlVNEqwUZOHCgqd89d+5cBAKBbgGH/UjswyOEfRUHNm/ejNmzZyORU7a2tiInJwctLS3Izs62sHUMwzD2YUTQyM+nDKpmmmTWrQNuuw3IyQFaWoBFi4Df/Ma877cSj5nSWRSys7Nx3XXX4cknn8S8efOwdu1ay8+Zrhh9ftumEWlubsbRo0ftOh3DMIzS6EWzSKzyC2lsBM45h4rktbTQNtVziqSqBcnMzITX68Xdd99t6PjW1tYwk01tbS2CwSAaVfxx0gTbNCJr1qzBTTfdhMGDB7NGhFGKQABYvBhYuZLey3V2omesproamDu35/Zg0JpoFj2FgscT7p+iWlVeo1qQu+++G6tXr4YQAvfffz+eeuopfPzxx3jllVdQUFCAw4cPo7S0FKNGjcI111yDBx54IOG22GhASAuU0ohs27YN06dPt+NUDGOIQAC4+mpaVlYC27fTDFS7zjBWE+kXGVkx12zcklOksbERwWAQtbW1GDRokKHP3HbbbWhtbcXx48fx05/+FLt27UJDQwPOOecc9O3bFwUFBWhoaIDf78fChQuRn5+P4uJilJeXG/r+mTNnpnJJTAxs0Yhs3rwZs2bNQnNzc1yNSEdHR3fYFUASVWFhIWtEGFOZNw947DHg5puBrVuBI0eAwYNp37FjZD8/91yyn19+OXD22Y42l0kjGhuBw4dJE3HFFcAXX9D2f/s34IUXKIKmtta6UN3aWv2cIlZpYZLBqBakvLwcW7duRVNTE/x+PwoS+NE6OjqQlZWF3bt3o7i4GBkZGeiUqWd1GDx4MMaNG4dFixbh8ssvx9k8KMTFsEVDWMzq1au7148dOybinfL+++8XAHq8WlparG4qk+Y8/bQQJSVCVFUJQfNA4y+GMQun+1swqH/OYNDa8ybCzJkzdZ8Dka9gMCg6OztFe3t70udqamoS+fn5orS0VJSXlxs6rw2PzrSgpaXF0PPbUo1IbW0tAGBal5jNGhHGSZJ1uF++HLjuOmDYMNaMMKmzfj1p5GQBOi1eL0W1lJVZd/6PPwZKS8lhta4O6NsXyM01PzonURobG3H48GF4PB5ce+21OBSl6E4qWpBoSO3IJ598gtLSUvTt2xf79++PqiFZvnw5rrvuOgwbNow1IzEwqhGxVBDZtm1btzACAHV1dVizZg0qKipQVFSEWbNmxf0OdlZlUmHLFspaedNNwJIlqTvisa8akyqBAIXPvv12z312mUc6OoC2Nkpqdvo0sHcvMGmS9eeNhVFzTDAYxNSpU3Hy5MnuNO5mEmmyiYeFj1DXY/T5bWmtmenTp4c5qdbW1mLNmjVYvHixladlmG5uuIGWZuQlYl81xgwee6ynEJKREaqMawc+H70uu4ycs7dtc14Qqa6uxty5c3Uf7BkZGTj77LPR1taGvLw8eDweS4QQAAl9LzuwmoNteUQ2b96MFStWAACWLFmCbdu22XVqppehzVg5YICxz9x9N9CnT+yohTfeUDvfAqMu2j65bl1o+znnAEuXUur1ZIvapcI3vkHLJ54IRZHZjYyQmThxIvr27at7jN/vR11dHRoaGkwxxRghLy8P+fn5mBRDQnvjjTc4NbwJ2JpZNRnYNMMkilFfkPJy4PnnyWYeCJAPyO7doRTY8VD7zmFUwkifbG9PrZ5MMrz3HlXmlRoZJ7KsGjHJBIPBbl9DO+no6MDbb79tuDKv4o9T21EqjwjD2MmyZcaOu/FGct
BrbCQnvb59gcJCmpkWFcWuhqpKvgXGHcTLolpdbb8Q0thIocOjRoXMQk5kWY1n3sjJyUGeQ9X5fD4fRowYgfz8fBQVFSEjhso0Ozubs7AmiaU+IgzjBFGc7QGQFmTrVqCpidTgHk/4A0AWB8vKIu2Inq/azp362xkmGuPHAxMm6Duo1tQ4k79jzJie2w4dCu/bVk3wtREyu3fvjnpcIBDA5MmTLfMHMYJMhBbPgfX48eNh+1g7YhwWRJi0QJskKlZW1BtvBH7xC+Dkyegz0MjtkY6E3/kO8Kc/cQp4xjiVlc47qEZSXd0zjFibZVXry2I2Y/SkIB2sdEpNhMg2eDyeHoKG9j07sSYGm2aYtGDMGBIMiouB5mb9Y6QzYKQWJBp5efSZ4mJg1apQjoX9+4Hf/96sljPpitZBdf360PaxY511UJWUlZE2Ro+aGmtzmSxbtiyqb0hGRgbGjh2L/Px8x0wy0ZAOrCUlJTFTw7/xxhtspkkAdlZl0oL164FbbwXOnOm5z+sF1q4FZs9O3A7f0QF8+imlgBcCuOQSmkEOHAj84x+0jROdMXqo6qCqJTLduyyAZ3U+k0WLFuF3v/ud7j6r84SkipE8I5EaE5Ufs9qin2ZreZXII8IwdlFWBqxeDbzySs99qdjgfT6awUbyxRf22NIZ96Jn+pBI04fTz1mp9Tt5Ejh6FDjrLBLmrVBEaP1CNmzY0GO/9uGtiklGD9kuqR3p378/9u3bF1XwWGbUe94htIU+nTI3s2mGcTVS/f3SS8Crr4bvSzaleyRGIh4YJpKyMmDHDv19Vps+jCKds3/2M3o/dSq9N5yqQ1vGWruuw5gxY1BSUoLi4mLd9O1CCCXNMdGQTqwfffQRAjESsBw+fNjGVhlDazZ88kna5kTElIQ1IoyrieXzJiclqY5rZWWUa0FPC+tUxAOjPoEAmQO1OO2gqofPB3zlK7S+cydFjMVEq8vXTqeFoPVf/hI4eLCHrr+6uhrz5s3DaR0Vkdfrxdq1azF79mxlNSF6GGnrhg0bcOutt0IIoUxtGu24KSdsdkVM6cEaEcbVxNJ6ZmaSb4iZiRgj0wio9lBh1OGRR0i7AFB+mlWraKB30kE1GlOnkgBy+DBQXx/n4IceIoHjV78KqQMrK0Phak89FdqvmVqPHz8eF1xwge5X1tTU4JZbbnGVEKJFmmn0OHjwIIqLi1FSUmI4WshqtFpeKXBoI6bs1vKyIMK4mk8/jb5v1y6y0ZuBNoLmt78lIQcIPWgYBghXecvncmYmsHEjKQc2bkzQ9GETPh8JIwBpRXqwZQuV7K2upmI5AOnyjx2j9ebmULhae3to/5gx3br+ysrKHjlDYiUIcxPSTFNVVQVvNDsu1AnrdTJiSg82zTCuQ5szRI6JWqTnv5loE515PKGaIT/6USh0mGH0JrxnzlC0lURVx+avfIUeQjt36jyIZPXIuXMT/+Kum6MyJ6d709lnn43vfe97ePbZZ3Hw4EHX+IXEwufz4ZZbbsGkSZOiRtPIsF6nzTSBALBwYfg2K8ZNo7AgwriOeNpNIaxRf2u1xnPnkiBy4AAJQyyIMICxSBlV+cpXqM5Mt0ZEK/FnZwOtrUl/998AtLS0dL9vbGzsLoLa3t7uWpNMokgzjcSpsN7KShJGpM/StddSigKZcdpuWBBhXEd1tTU5Q4wgx+ZBg0IziPXrgdtu45wiDKVyHz8e2Lu35z7VHZulw2owSBFoX71sjGnfPbXr5QFwGMB+kIPqunXr0k4I0Yb1NjQ0oFPHkczj8aAqVgpoC9DKlTJSprOTxsx/+zfqtyNHOhNSzgnNGNfh9wMzZ+pnULU6EZORkGC17yjGShYtAiLzdMlZp9V9M1WEoER9J04As2YBm1qvAV54If4H776bkvh0durPDgB0Itwh0QPnKuraQbykZ4MHD8a2bdtsNdFox65oZhizxy6uvsukLffeGxJC5M1ll88b5xRhItE6qGr//3Hj1EjlHg/Z/t27gdOn6En0t2fPoNZ/BkFMQyNG9/xQeTl5bo8YASxZQmab994D8vNxqrAQIuKGlO86AfzM2stRAp/PFzWFPQAcO3bM9kgavUgZidNjF5tmGFcg1YoffxyeuGziROBb3wKefZZSF1g92HNOESaSaM+Rujqgyw3C8VTusQhvvwDgwRdtGShu26bZGvFQ1aseec45QEMDsvr2xVQAtTrnygAwDHBV4rJUyMvL69YIRMOuSJqyMhJA9PyNnR67WCPCuAIZmfLtb4dvf+cdGuz37LE/LDJNIg+ZFFm2LLrJTs40VRVCgEgtn+zUdEFenEI1ukJoMjKo3kGs6pE+H6qrq+Htim+XhhrtBHx+djYa//IXFHz+uf0pPG2moKAAn3/+eczMq3YWyHvmmfD3ZmWfThXWiDCuYNky4D//U3+f3XU7ZE6RwkJgwABKL9+vn7qqd8ZaDh2Kblt3eqZphJhaPlyMaejK/eH3U7IRrRZEh/Hjx2P4+efj0z17MLJrm/Z516+11R3xzCYRz0xjdSSN1kl1W0jJhXHj6G88dMj5sYsFEcYV6JSm6MbuwV6bU+SZZ0gQGTlSvSRVjHXoRSBocTInQyp40AkRqSjX5qXX04JEUFlZief27MEYALMArAPQR+9A1eOZTcRIJA1gjZkmlulQ4vTYxYIIoyzawf7xx3vud3Kwl2PxlVfSeFpfT6+iImfaw9iLU7lsLKGxEXmftSB/6EQUNr+Fg2eGoBFjkOtpRt5dNwGvnInrgKVXWfckgOeHDsXqG27A3WvX9vzQhRdSzGgvQGZejRVJA1iT8MwVuW2E4rS0tAgAoqWlxemmMDZDw3nsV36+EE1NzrbzssuoLeedJ4Tfn+CH/X4hrroqiQ8yTlJdLYTXq98nvV4hqqqEaG93upUG6Wp4O7JEJyBuwx8FIMRPsCx0UXEuBuQGovua2vUdnRkZPX+sRYtsukh1CAaDMX8v7cu8c+r31WDQtFPoYvT5ze52jLLEC5WtqlKjbofUpn7wQai+SEy05dK11UvjlFFn1KGsjBLn6VFTA9xyi9oOqmFUVwOZmfDhJDwAJuNtAMC7mGTY27a6ujpqjZWjmZloy8mBZ/Jkqomg9fJ2sva8Q0gzTVFRUcxaO2aZaQIBYP788G3KOdpbKw+lDmtEei9+vxAlJc5I8kZoaBAiEBBi7dpQu4YPp7YFArS/G63m49Zb6eDZs4XIyQl9cM4cWp83z5kLYhJC/l3y5fGo0zcTZtas7gt5AdMFIMS5eD+hi6msrNSd1QeDQdKoGFFx9hLa29tFZ2dnTO1IXl6eCAaDIhAIiIawwSQx7rkn9PNmZgrx3/8tRGmpPdpko89v9hFhlEXWQ9Ci9ZtzGj0/gUOHwqMPun1YtKXTu2zo2LQp/IPS63HdOuBrX6PCH//5n8A3v2lB65lk0PotPfdcaPsPfgC89poaEQiG0V7Ms892b74AbwEAPsI5aGt/E/0Mft3q1avD3mdkZIScMn0+lzgr2IORtPapRNNo/9onnght/4//AK66ijLnOpXOXQ9O8c4ohfYGuu468
pEDKDz2Rz+iwf/gQYokdNoks359nHH1l4dR9tVGupgozmmGUPsW7VWkVYr/KBcjAOThIA5jOALPfo7ir4+I+hXSSbW9vR2XX345Ojs74fF4sHTpUvz1r3/FoUOHUFtbiwJ5s9bW6t8Lque/t4iPP/4YpaWlhqJpnn/+ecPfq0o/Nfz8tlYxkzpsmuldGNHequQEGNMJzMjFxHsNGED2qcrKCFuPufj9fnHVVVcJv9+vu/7YY4/p7u9txHNSra52uoUJEONirsR2AZDZMRZI1OFS3jDSjiVfJSW91mHbCjONKv2UnVUZV+LWLJW6bTbD2ezLL8k+9f3vx48ZTZBAIICrr74agUAAlZWV2L59O6qqqnTX/+d//kd3v/Y7egOyuq4eNTXkxOoaysqAzZt1d10wZxIA4K23Yn/FsmXLou7zer2ojixgIrMBlpQA11xD2zIyqI/bXI1WFeIlPAOAQ4cOJVSbZvx4YMoU/X1K9lN75KLkYY1I70LrWKWig2okTU3k9DV5MrUxw9MpRgw5KZqe2yNEbq4xrcfttxs7bvlyHS/Y5Ln11lsFAHHzzTeLoUOHCgAiJydH9OvXTwAQPp9P+Hy+sJnZwIEDRU5OjgAghg4dKubMmSMAiHm9xMH27rt7/i0yKlXF/hkTv1+IIUN0L2bNTxoEIMTgwbEVFXfffXfUWXww2g/S3i7Evn1C/H//X/i0PS8viqd376CpqUnk5+eLoqIikZGREfV3nTlzZtzv0htHneinRp/fLIgwjiOjT4JBGosibyDVoxHa24U4fToUALMTpfGFivJyIYqLhRgxQohdu0iaKS6m7UaEkiR5+umnRUlJiaiqqjKkVk/kVVVVJUpKSsTTTz9t3o+rANr+OWhQ6C845xwhli4VYsoUNfLZJMx3vxu6mEmThFi1qjucYsdTn3fvikz10dDQIAKBgAgGgyI7O7tHP/B4PLEFESH0+3SkuaYXkoqZRttPtfLlqFHO9VMWRBjXYOS564aBfvp0ausqzNe/iIwMIcaMIeGjqUmIzs6Qw0t7O72P5nSifS1bllC7tD4dZgsf0V7p5EdipH+q5LcUE+3TKiuLGu/z0bZAQDS8sl8EXu8QL78curZhw8IVFUb+//z8fNEU64ZVxYlBUZJJeqZiP2VBhHENaZGlsqFB/OT2AwIQ4jY8qn8xwWC48KGHtPVMmhR9NEkwG6XWBDNgwADLhZDs7Oy0Mtmk1TMzzpPKyMOsurpaeL1e3f/e6/WKqqoq0W7khnUq3acL0JpppIZJT/NUrel8KvZTFkQYV1FV5fIxCRBP4ZsCEOJ8vJXaxbS30/QTOupqwJAtXas+t0sLEu3ldpON3y/EhAku75+SOE+r6oWvxn2Y+f1+cf755+v+1zHNMZFEi6Bx3Y9qDfHMNLm5uT3urWeeUaufsiDCuIrrrw+/cVznADhzpvgUI2hcxRlxGV4SfhSHLignJzHbktSMGJmi6pCMwHD33XeLPn36CK/XK+677z7Rp08fkZmZKQYOHCj69+8vBg4cKLxer/B6vTGdFI2okd3ED3/Y8yd3Xf/UsmlTzKdVPEWF3n8vnSsTEkRkH586VYg+fegk2dlCXHpprw3l1cPoZGLXLvr5VOqnLIgwyiPN1X5/+CTtvvvIMjF8uOJ+IVp7e1eEzGg0dF/HIvwvrQQCydmW2ttJVZSEvrW6utrQ4FVeXi6Ki4vFiBEjRFNTk2hraxNtbW1CCNG93t7eLs6cOSPa29u7t0nVcXFxsSgvLzd0ruzsbNdoR7R/7cCBoZ/8vPNc7qDq9wsxdGjMp1U0RUV19d4eDqrnnHOOWLp0qZgyZUp8vxA9pG/U3Ll0EmmS7IXF8KLR1NTUHakW6966+OLmrv+qU0yeHOZ77Fg/ZUGEUZ4kJ/vqoGloA0aLAKaJr+GF7s15+EwEMVUEqt9JLRox2hRVJwmUjIpZs2aN6NOnT9SBSxvZ0NnZacymH0Gk6jhWyKHbtCNG+qbyfkt6yDpHgBATJ+o+raSiYvx4OqxvXyGAAwIYFfP/TKYPCSFI6lu1Klz66eWhvJG0t7eLQCCg87uPFsA0AVwigDNdf+1x6Xss9u1ztp+yIMIoz7Jl0Qd5VzgBVldTFSlA9xo83QNDikKVFEQMlFE3qgUpLS1Nbgarg9SOlJaWGtaODBgwQJSUlIjKysqUCnpZhYqOf0mjVe/060cX0aePiPW0am8XorExdL1/+MMTMR1Uq1P5QXRvHg7ljUQK/OHOq3o/X6cyPx0LIozy3HVXdEHEFbZ3v1+I0aOFAEQ1vie8OGnNg0tOUadMIbtV//6hL8/LEweeeUa8U1Ul3nrmGTFw4MCYAkB2drZoampKWgsSDakdScZko6J2xO8nM4xr+6aWaDdZnKfVmTMhueXPf94jJk6cqPvfJeQXokdaSX3WoS/wf08gyrgDnBL33utsZ2VBhFES7eRM+zyNnAi5YrAvKwtreBBTrXtwGSyjHuthHwgETBU+ojc13GQTLfxQvpYvX55yqXOz0ROSnXb8S5oUHvQXXECHfeMbD/f435JyUI0Gh/IaQj+SZkmU4WCq40I+15phlGTMGCozUVwMnDjRc78QVIpC2VLqjY1UKXTXLuDJJ0PbJ0wAbru9640AYKwCpmFkGXWvV3f3KQDRykfIOhYej8dQ+fFUkbUz8vLykJ+fj5KSEpSXl0c9/mc/+5nhGhpWIv/a2lrgscdC28ePB5YuBSZPVrxvRqOsDHjxRf19MQqPNDY2YvjwYwCArVsbu7efd955WLp0KSZPnoz8/HzkWfGDmHrzpA+R99ZZZ30LwM+69srKvWe6j8/OzkZ1dTVKS0uxZcsWu5trHJsEo6RhjUh6Ec8vRPnkZTG0EU0YJfJxQAwfRjba/HwLPNajzBynRtE2mO0PkihGUlbLl5EaGlZiQOGkdt+Mht9Peb4TVO/Q//JfXR/pqREBUnBQjUSaHwsLqV0DB7o0LMk+2tvbxbe/Lf1BOgUQFMB8AdSIaM7FdsMaEUZJDh2Kvq+mBrjlFjWr63YTo9pogfdzNKx9Cf/1AM3mJk4EGhqAggLzm3Em4v0qAMU6x914442oqalBQ0MDCqxoSBwiZ3CTJk2Keuzu3bsdnb3FUDgpXfk5Lo8+CnzyCa2PHQusWkUqyTjqHaqs+2HXu3PD9snKuqZp2AoK6GbZsYO0IV98Abz8sjU3j8uRmru9e3147jmpOWoD8AMAAQCzAYwB8EnY55TWjtgkGCUNa0Tcj9YvJDLhDtzmF2KgPPCePfR20CAqhmcqXTPH97KzxQNd5+3sWv6vxnY/duxYx7Qg0YgegqiGA6vfHz2zviv6pha9Sn0ZGULs2GE4rvOee+4RFBYqBNAQ9t+Y4hcSjSuvpPaOG8eJzXTQH37ORLxX4/4y+vyOIv8zjHnEM/27wi/k8GGaqT3xRM/9Hk/XvU9MmgQMGAAcPw689x5w/vlmNaMRhw8fRsZf/oI7vv51eADMREgTcjOAxwCsr6zEhH/+Z5wcOdIWnxCj+Hw+jBgxAvn5+ejfvz8aGhrQ2dmpe2x2djZq
a2shhMCwYcNw9tlnW96+ykrgnXfCt2VkAFGaqDZ6N11nJ3DJJaH3mj4rkX3M4/Fgw4YNkP5OwNkA+sLj6YDQ+Zyp3Hwz8NJLQF0dUFVFTmVMN9XVwLx5wOnT2q1k3MjMFDhz5hZ4PJ6Y/9OAAQNQWlqKRYsW4fLLL7fl/oqJHVJRKrBGxP24vqidEeeBCHv25ZfT5kcfNbMZmtmMzutM5DZFScRvpPt6LUKrOMjJCf1055zj8gyqSUbK6P/+R7s+OlkABirrJov8M7ZtC7V1yBBObKbDrl36f+1zz32WcE4fAJZVylYqfLeiokJUVFSI+fPni/nz5yf0WRZE3I/fT0kcXavyTkKS+vGPafeCBeY1Y9myZd2hsN8DxMkoAkmnS3IvGA3vtdKJ1YiMqbSQHIskQmL1K+vuFIAQGRmzjFfWTQYjfwYjhBDioYei+x4nGj7/la98RQDWVMpWxll1yZIlWLx4MRYvXozVq1cDAGbMmGH1aRmFeOwx4N13w7dluMlNuqyMPGn1iOJhe9FFtNy1y7xmHDp0qFvd+jiAi6Mc54kRkqkSRsN733jjDcuc7NLWQTUQoH6pxcBNN378eEyZMiVi6wcAgLvu+l/ccsst1pn7jPwZDAIB4P77aT0vr6fvcaLh8zt37gQArFu3DrW1tQgGg2hsbIx6vCWYLgJpOHbsmJg+fbo4duxY9zYppdXV1Rn6DtaIuJNoDqrnnutClbffL8T550efguiwfz/tzswU4sSJ5E/d0NAgAoGACAaDYtiwYWEzmaldbTkdOWvUqUGjKk6bafx+IYqLE1YcqI/WqXrIEMMV0MhBNfz39nh+JgAhhg49aX234sRmcfnXfw39LLt307bOTn3NXTL3l5n3mTLOqoFAAPX19Zg2bRoAoKioCADQ3Nxs9akZB4nmoPrhh8CKFbTe3u6S2WZlJbB3L60PHkwX8Mc/Ak1NUT1sCwqAoUOBI0eAxx8H7rgjuVPHSvJ1EMCnAD7zejH+mmvQ/9lnyXE2EHCNk5+cXcvZm91OrJWVFAqpxbUOqlqn6scfD23/zW8olvyaa4CRI3vcdFoH1fXr13dvHzVqFL7//e9j/foO7N8PHDnSx75u5do/wRq0f+2GDbTN66WfKBgEhg0D9G6FyPtryJAheCfSIzsCr9eLdevWmXwFcTBF7EmATZs2CQBhWhIt7e3toqWlpfvV1NRkjUbE7xfiqqtcM3N0G8uW9axbpXWrUN6FQavSyc0NNX7zZsPhj2PH0kcuuyz5ZsycOTPqjMXr9Yr1jz4q2t97T4iXXw5XPbmweqld2hHtXzt8eLjiwHXaOi1J+lhE/31lZdfv2tetZGKz0lIh/uVf6KRZWS78M8wlyb82DG34fKxK2WaGZivlrKpl2rRpYvXq1VH333///bo/jumCiFRd3nOPud/LCCEMpdtQmyTvfL1aOn37JjZ4a80xeXl58QcMvba5uHqp1U6sRv5aVzqoJhkpU11dLTIzM3V+Y4d8R9vbydbwzjshQaS11aKTuQOz6gLGqpRtau2gLpQURBYvXiwqKipiHmOpRkQ+JQIBKoMN0GzXZTNHVYk204x8NrpCEEk6/DH1wduoNqB7wEiz6qWxBkvta/DgwaKkpERUVlYmVDAvzX6ucJL0sbj66qt1fuPviczMTud+p85OcioDyEerl2uvzXKf0auUvWrVKktKQSgniGzatCmmJiQapjqrOibi9w6M/LyuUXn7/UJMnZrwnW/GQy76DDVG1tQ0c/Kz2kxTVZVWP1eIQCD8gmI4VUvN28svv9xDVS+1UdXV7zj7O/37v4dOumiRTSdVE7/f8F9rGHmfCSFEZ2en6aHZyoTvAsC2bdsAAPPnzwdAjqr19fV2nDocDg+zlGXLohfN9HrJf9Kq2iumU1kJ7N4dvs1A+GO8SF8jUbVlZWW48cYbdff5/X7U1dVFrx2TJlVLI0MQi4qKkBHj91++fHlCYYeRUcCuCiePhjZkNyMD+PWvY9aUGTNmDEpKSnD55Zf3cA4WQgAAhgwZErbdtu4lC6pMnBja9sQTVBo5GKT9vYy//52WGRnAb39rqFxQXOR9BthXnVsXU8UfHYLBoKioqBB1dXXdr4qKiqjOqpGYHr4bbeboopBHVXG9X4jWtjRkSKjxo0cn5MUou1ikm0a830DrG9K/f3/dGWpU+6108ispCaUIHThQiEsvdX2/TlQ7Eg298iuAED/6EdWYGT7cJdq6aGhvwB/9iLZFi+sUlCAv2m+YmZkp1q5d292tRo6krx0+3CatJmuvezB5Ml32xIn0PsZfqwxKmGaOHTsmcnNzU1KjWiaISL2W9tXLVX/JoB3c8/J6/qSu8gsxMvgZuPO1jv+jR9PHcnPjD95GHrIx7bfSyW/p0pAAlUb92oggEsuBNS2fbdGE56efjuv3dsstt0T9HbUCb3u7EI88Ql977bU2PfzS2pHHOPLv3bkzNJa6ya1RCUHEDEwXRORTYsoUIebODe/geXlkPC4poRuZiYuRwd01fiEmDn5SJpgzhz7+4IPxP2NkhhqXhgYh/vKXnv3aLSNXDKRz3aRJk6L+TsOHDxdVVVWipKREPB1xD6flsy1B6Upq3V577TWdVO7RNW8vvkhfNX68jdeWZn5PyeB24ZkFkVi0t+v/oy4OeXQK1xe0i8RkL8af/pQ+fued8Y/Vy2qpN0ONSZr3a20uhGTMNJEOf65/tiUoXRn5zfQiJ+rr6St9PiHOnLHp2pK1caYRbheelXJWVQ6fT99xVYjQenZ2r3aMMkpZGbB2rf6+KGVY1CYyo2CKXoznnEPLDz/U39/Y2IhgMIja2lo89thjPfZ7EvUOjNWv08Ah2+fzYcSIEYYcWLOzs8Nq1AQCwHe/G36M651UE/SOnjlzZtSv8nq9qKqq0nWELiwEMjOBjg7gs89SbrUx8vLIG7OkBLjgAto2cGBq3pkuo6wMePVV/X0uKSllCMtTvCtLWRl5ZBcX6+8/fjx8n1ZIYcJ44onw967LzqzNn/yPf9A2jwdYuhT461+BQ4eSHvykIPLRR/r7Y6VwBwAhBPLz85Fn9Pyx+nVNDdBVasHNFBQUoKGhAVlZWdi9ezeKo9zDra2tmDt3LgDghhtuwN13i+7/IT8f+L//N26mfnewZ0/4+4gbUJvCfVeMKow1NTXdpTgi8XpJGGloAPbtA846y4yGx6GggE6YlQX84Q/A/PnAhAkuCbszj0Ag/L3rxlcDuH0+YB1pNIu0AhldV1sLbN1K2+Sze8IEYPhwFw3uY8bQrKu4GDh9mrYJATzwAPDOOySIJDn4nXsuLffvp9o6kSxbtizqZ2PNUHs72rBDIJbmaDSAaRgw4DKsW9f130LgscfoL9+40UUh5dF49FFaZmQADz3UI65ThukWFxenVONr7Fha7tuXYnsTweejgeWb36T3gQDwz//c8+mcxnQVx8Xw4T0r7aYLvVcjAoRUfwUFNFNcs6bnMWkyizQbvYm8EKGCdoCLBvfqauDWW4EzZ3ru83p7mmsSYPhwYNAgUrDt2xeeFgEADh06FPWzsWa
oMZH9euBAUsX06QMMGZJeI1cXMs9IYWEhZsyYgQceeCDiCDKrfvklQC4QxDXXhI5wpbJTavEAYMcOWvbrB1xyCXDxxWRa7roBZ86ciRdeeEH3azIyMnD22Wejra0trtZt7Fhg+3abBRFJfj5dV00N8PrrrinqmCo1NSQsA5Ta6NprSTF08qTLTN5x6N2CiFb1t3s3CSIeT/jItGAB8Pvf94pOnwgWPrvtp6wMaG4G7r67574UBVGPh7QitbXkJzJxYriqvKqqSuczHohUno6yX3d20gDe2krLzz5zkXRojEgzzQMPPBDx+5UBWAegDwCpNaGl6/qpFr2ZwJdfhpnkgoEAPB4Pdkcm5tPg9/sxdepUnDx5Mm4yK0c0IkBI6CopCfnDbNhAA5AQ0UvPpgH//d8kdGRlAVdfTds8nvQSQoDeLogAoX9UziILC+mf3rWLvLNcVFLdTsaPp2eanh+v65RIfj+wZEn4NhMNseecQ4KI9E8w3S9ED9mvv/Mdetq++Wba9uPIUufh2pHHAbwLoLbH58499xZkZ88B8E07m2sO1dXAvHkhU6IWrxdlp0/jcYP/tdGMmo4JInr3y8GDaevDp3VZe+452ubxAG+/nb5yF/uISOQscuNG4N57aZuc7m/YwBE0ETzySOinkOZ510YgLFsmdffAhReaboiVDqu/+hXJtbb4hUgnHu3DKM1TZEvtSE1NTUSK/GERR9J9/e677+CGG26wrX2mUlYGbN6sv6+mBodjRMdkZGRg7NixCQu7jgkivaw0h9Zlra2NtnV00PuSEn25zO2wRkSLzxe627QcOpS20nciaCV17b0/YQLw7W8Dzz5LExVXuCJobex/+xst+/Uj73yAnAhGjjRFByodVg8cIKWEEBb4hUSiN1r1gn4cqR3JzLwYn3zyu669xwH8B4A7ABQCOIgBAwagtLQUixYtwuWXX46z3TTV3LAh7K3weOARAu+++65p5hgtcmhsagJOnSLXI1voBZFgWuIou9xrToyFDTlNUsKShGaxMDmDjN/vF1dddZXwu7zehxDGsvy5JnmZDSkLZXrmP/wh9JWDB58U2dlXCmCaAEZ3J5GKW0smUdyeCckE2tvbxcKF2jL2szSJu7IMJUBTlu3bQ2UqzjtPiFWrRA0gDgBiVJyEZcn2sc5OIfr2pVPW1Zl7OXHpZcnN0iWpLCc0S5Yky6cGAgFcffXVCAQCYeuVlZXYvn07qqqqoh7jFmJYFLo1pK5xoqquJh8gPUxS90oV67/+a2jbsWOZaG3dDiAIGdEBmOQXosWMMsAuRVql9u71obpaOqieAbAfwDRQSO/JHp+78847u5OfKc/PfkY+TEOGAO++CyxYgJ/NmIExAD7ROTxZc4wWjyekaLPdPCN9+KZODd23w4a5RP2aOJHKSteavY1ij1yUPLZrRIQwLH1rtR0yPfeiRYvErbfeKgCIm2++WeTl5QkAIi8vT8yZM0cAEPPmzes+fs6cOa7RmLi+uq4Wv1+IoiJLLyaWUgI4KYDvCQDC6/WKqqoq0W62OqmXzSIl+r/3mYj3yVXwdRSpYqup6daGnBkwQDyzbJm4ZeJEcUF2dkwtSGdnZ8p97Lrr6PcbP96Bos6ygNOVV1Ijfv1rmxtgH3/9K11iRoYQv/0tFdB0Tc0uDVxrJhW0JdVl/ev+/YW49FKx97HHugUHrcAxdOhQAUDk5OTEHeQAdFcl7tu3b/d3PP3008oJJdrinsOG9RzgXVVdV8sPf9jzIqSq28SLiaZiBaamrCqPi7YM8MyZdGKfz32jWYLEEgAzMzu7BcBor0GDBkUtnOcoOhd0JuJ9LEHEDO66K3Q6x4o6P/ggNeD66x1qgLX4/UKMGkWXWFZG2zo7XWT21sCCSKpI6fvRR6lHDBggBCCenzixW3AwInAk85o3b54yviX6D9Hwl2skda1UNWhQ6ALGjxdi6VKqyGzixTQ0NIjq6r1dp5G+CnJmPtV8vxA9ZD/evZuuNStLiMsuc2A6ay/RBMDnnvssbgXfyJcq92IsCeskIL4X0e6MjAwxduxY3SJ2iSJvnR/9KHRax4o6y77cr58QV1yRdn1ZO0d65hmnW5MaLIiYQUODEK+/Lk4PGdLdMz4DxFRATAPEaIsEEWmyUUEoSavqukakKhMvhv7LUQI4IIBPu07xSdf7UQLQr3RqCZ2dQkyYoMB01nr8fpIptX+rVtmlreArhcFor4EDB4bdi45TWanbb6fqtN0sc4wQ+reKY0Wdz5wRYsSItOrL2jlSbm7o99250wFBz0RYEDEDnbvPqCrUKqHECXbs0B+IXGeOsTmSZNmyZV0PuiwB/J+uUz0ugCzr/EL0kKPcggUKTGetR+vLNHCgEKtW9bSxNzU1ifz8fFFaWirKy8sN34tOmmz+8atfiWOZmWHj0Ok4gohZKBOEJfvyN76RVn3ZyBzJjbAgYgKrLrtMnIzSK/RUodFet99+u2lCid0Dod8fcpORLwtcKexj507bpCrpkEyvG7tO9arpD4m4pOsop0E7oxw+PHRZDz5I2/ft66nsam9vF52dnSIYDAoj2pHIlx2aSu05/qj5v94CxHxAN2R30qRJlmjalAgp1WuAY6oZ81BG0DMZFkSSpKGhQQQCge7BaWqUgVtvBnL33XeLPn36CK/XK1asWCGKi4vFiBEjxK5du0R+fr4oLi4WK1asEH369BGZmZkpCyV2DIQ33xy67HHj9GeXrsHvF2L0aEulKm3/kRFT9CrtOuV++wWRdB3lNKQiayWjHRk0aFC3pvL666839T7U3tf33nijmAaIJTNmiC+6LqQD4ebhrIi2BQIBSzRtkYKII47qadyXlRD0TIYFkSSJHHCkICJVoJ1dS6kNKS8v7xY4mpqaRFtbm2hraxNCiDD7rJx9CSFEW1ub+PDDD0V+fr4oKSkRixcvTlgIyc7Otsxko40SlPd8VhZtiza7dAULF4bu7tGjLZGqov9nI7pOfVqMGFFgj1+IlnQc5TSk+nyK1I5kZGQkfE9GRr5pBYpokwa5/bEo0XjaC5Fjj555uLy8XJSWllrqcySDsPr1o1Ofd55DE5I07cuRl+VqzXMXLIgkycyZM8MGl1Eg1WcNID7Q9JJNOTndwkeyDmFy8JMzsuLi4oTs1dpXSUmJqKysFA0m2Ej17vHIl2vQ6uwHDgzd4a+9ZolUVV1dHUXb5RFAuwCEeP99B6S4XpBTxIznU7K+I5Evba6gRYsWha3r5R+aGCUa73uAIfOwmY6psWhvD+USWbPGoQlJmgoi+/eHhI+77nKx5lkDCyIJ8PTTT4uSkhJRVVWlmwfkHJAadIl2EMjNFR07dpjmIGWmUJIq1dVCdPnE9Xi5TvvpgFQ1e/Zs3f+loKBNAEK8/LLpp4yPNqfIuefSdQ8a5O5RLgKzZpRm3YsDBgwQAOUWGjx4sAAghg4dKq6//noBQHzta1/r3h7rNS1Kv33o9tst14Loceed1ISf/tS2U4Yj+7KMAvN6KYrG5X35zTfpcvr1E+LECffmDtHCgkgCxH2wa16dEUsrHmapDIR33n
lnys6sfr8Qmohld086bLIpa31DZJI6+ZJOkCUlrQJwUJCTOUV+/3u6/qlTHWqINfj9ob/2Jz8xZ0ZphskmlVcxIN6LGHOkmfid6mpbtCCRrFhBv/HcubaeNpz2diE6OkL5gF591cHGmMMvfkGX8s1vOt0S82BBJA7aB8egQYOiDgQZGRniR8OGRVWPWq0iSGUgTJZ/+ZfQ5VmYdNQe/H4h/umfLJeqjPwfs2Z9IQAhHnjAtNMmx6FDIeHs4ovTIiGU3y/E2WfTJV1+OW0zc0Zplskm0ddvNP31ACD+cu214u3+/cXnGRniwK5d5lxcgjz+ODXpiiscOX04crBavtzplqSE3y9EdnbI5JUucNG7OIwZMwYlJSUoLi7G8ePHox7n9/vx64MHIXbs0D/A4gJiPp8PHo+nu6x5cXExysvL434uOzsb1dXVhot4yUJhwSCgPXziRGDpUmDyZKo55boaU5WVwJtvhm+zoILUshgVATMzM7F27VpMnDgAALB/v+mnT4xhw4AZM2i9pgaoqnK2PSawdi31YQC45x5aejzmFWEsKChAQ0MDampqsHDhQuTn52PixInmfHkEo0Gl+aYC+F7XNgHgx9nZuOSeezDp7beRc+wYRpaWWnL+uO0bTcvGxtjH2cI119Dy+eedbUeKrFoFtLbS+je+4WxbHMEmwShprNKIRDqlRr56pN+WBmipGrBgVh2PSJONdHAz8opHNIWP9uUae6XWQXXo0NAFjB5tSSp3IYS46667ov72sg898gg14+tfN+20iSN/m2XLQr/L8OGuTAill7E/I4Oivay+lPb2drF///6UIt+i3qualx2m4ERpagopg0+fdrQp5GwOkFOby0oX6PVfr9eVt2JU2DSjg9YcM2zYsKgDgW4onNbZb/Hi0IBQWupI508kTXV2dnbcRGhpFZ5vk1Sl7U/9+vWLK8w+/zydevLklE+dPEZ+G5egwqXo+XPJXEFer1fcd9993et333237v05YMAA0b9/f5GdnS0uuOACsTA7W3QqfDOePh0aK/bvd7QpxHnnhX4fF6V7V6H/Wg0LIjoYnZFEDYWTzn6dnUJMm+Z450/Wbh2NF19kB9VEMPJba4XZd9+lJgwYIMRVVzk0eUsjiVO1S4nMFSTzCcl1rbCyatWq7vxDH330kThz5kz359vb25UPUR07lprjqI+oVClo8624KN27av3XClgQ0aG6ulp4vV7dB4bhSpWy899/vxKq7UTTVEfTjvj9QhQUhN8MrnZQLS62fCCPZd7TqyXzxRfhTXFs8qb4Qy4R3HYpWmElZsRL5KxAsZvxiiuoOevXO9iINFApuK3/JorR57cXvYiysjJMnDgRxcXFPfb5/X5MnToVJ0+ehC+Wh9uYMT23HToEaL9TiNQbaxDZVunMWlhYiBkzZuCBBx7QPb61tRVz584FANxwww0QXW197DHg44/pmDFjgPvuA/74R6CpyaUOqsFg+LaMDKCzM+WvbmxsxOHDh+HxeBAIBKIeV1NTg2nTpmk+Bxw+DOTkAC0ttG3DBuDWW6m7DBsGnH12ys1LDI/H1r5qBadOhb836W+2DO3Y4vF49MeaQIA6BgD07w/86lfAo48qdTPKvuqow2p1NTBvHnD6dM99Xi+wbp3dLUoZ1fuvVfQqQURLRkYGOjs7u5dAjIFBi6KdX3r1Z2VlYffu3XjggQfg8Xi6BQ09Bg48H8uXP4cnnngcjY2PAsiCx0NBFP36kUP6yJHmRR5YinzSezz0hJcMHQrMnw88+yxw8GDKA/kYPUHU0Od6bnNMfs3LoxCowkI6aSBADzxFHnKJsHcvLb1e4De/odtPoed1cvzhD6FZwdq1wE03AQsWACdPKnMzSkHE0QiwsjIK69OZWKKmBtBMBFRl+PCQ8HHXXYDfnwb9Nxls0c+kgNlRM1q/ilWrViWXmVBxfZpx3xFt8zsjlq7QbIYwoqY1wUE1ljkmlnlPOXuw9Hf685+pESNGCHHlla6KOvD7hRg2jJp/7720zbXZKLUhFAMG0EVlZgqxa5eS/g4yAuy66xxuiMtLF+zZQ83t21eIL790cf+NAptmoqDVHHg8HsyfPz++OSYaiurRjGtHygCsA9AHgKdrGy1dp9k0oqlKcjapNcfs3r076nGxzHvKTd5k+667Dhg0CPj8c3pVVQElJTY3JjlWryYlGADcfjstzcwdYit6KrMzZ4CLLgq9V8iMpoRpBghp94YPB956izrA8OGuUSk88wwtp08npSTg0v6bIr0yoZlMEgYYNMdEIjt/cTHwox+Fti9cSGpuBYhMhFZSUqKTCO1xABfrfv7cc29Bdnb8RGjKUFZGamw9Ukw6p01+d+jQoZjHJtKfLMirlhiNjWTbuOyy0LYNG4DaWvKxcfwp0xOZeK+2NmSB83qBjg5lm2yM6mq6ED28XtqvEFpBxFH5qKAAaGigpIXjxlFjHnqItitOIAA8+CCtX3+9s21xHHsUNMljd/Vdw2hDeWXxJUdDIaITO7JmaoSp4HTXcqpwQfcI5/rrLYk0SNYcE0lTkxA5OdSkc85RoLqmC6MOXNhkY/j94SkBFDcznDgRat6RI063pot77qEGzZ/vdEsMcccdod/w44+dbo01cIp3q/H5yFOrthb41rdC2594QrkZZWztiLQJCAA/BhAE8CmAgwmniXcE7RT5hRdom8dDeeknTEhaTdvY2IhgMIja2tq45pi6ujo0NDSgIM4srKAAePppWj95khQ1DQ0OTt5cNgsHXNlkY1RWUh/W4rjKLDr9+oVuq2uuUUQRfN11tHzqKeCqqxRpVDja4WrjRtrm9ZJVVKFHhu14hFDI8KhDa2srcnJy0NLSguzsbKebE47HE/8YxX7ejo6Obt8RCmOuAXARgAYAY7uOygJwssdnlewqFv0HHiPfCyAYDIaF6cbjwAFg1CggMxNob4/+ULWN2lp9x5VgUNmoAxc2WR9tpNe111IYFUBP+DvuCEV6+f1KmhouuoiaBgCLFlHUkqO0tQFDhtCNBSjSqHBc+MhICaPPb3VFbjfgwumZz+fD/v0efPbZKAwZMhOAHNEHg8psTQOQ3+NzympHqqvpqa5HCv/BzJkzo+7LyMjA2LFjkZ+fj7wEtS35+UCfPuSHeOBAUk2zBoOCl4oorDiIzZgx5BhcXBwSQgASPlasAPbscVhlpo+c1WufK467FjU2Au+8Ey6hOt6onrjwkWEPdtiJUkFZHxGJ4qG8eujb1jsj3sdOXe73+8VVV10l/E6He2rrv6f4HzQ0NIjKykpRUlIicnNzo1571BIABpEuRc8+m9THzUXWUCouDoWNDh7soONKfF5+OfQX//KXCvjaJItyMd3G0GtuZPQsNyo6LnxkJA37iNhN5LRswQIlbZRANKlczohPAbgl5uezs7Pxy1/+Etu3b8fDDz9sQQsTYPXq0GxHzuqTnCKPGTMG3//+9xEIBNDc3Bzz2KSirbqQ1dulWttRZNSB3w98r6vo/GWXAd//vrL999VXaTlzJvDv/66Ar02ylJVR4/VIMdLLSvTGD2lOcGxWr
(... base64-encoded PNG plot data omitted ...)",
+      "text/plain": [
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import epde.globals as global_var\n", + "import torch\n", + "\n", + "differentiator = global_var.AutogradDeriv()\n", + "\n", + "global_var.solution_guess_nn\n", + "\n", + "sobolev_pred_1ord = global_var.solution_guess_nn(torch.from_numpy(t_train).reshape((-1, 1)).float()).detach().numpy()\n", + "# sobolev_pred_2ord = sobolev_nn_2ord(torch.from_numpy(t).reshape((-1, 1)).float()).detach().numpy()\n", + "# base_pred = base_nn(torch.from_numpy(t).reshape((-1, 1)).float()).detach().numpy()\n", + "\n", + "sobolev_deriv_1 = differentiator.take_derivative(global_var.solution_guess_nn, \n", + " grid=torch.from_numpy(t_train).reshape((-1, 1)).float(), \n", + " axes=[0,]).detach().numpy()\n", + "\n", + "sobolev_deriv_2 = differentiator.take_derivative(global_var.solution_guess_nn, \n", + " grid=torch.from_numpy(t_train).reshape((-1, 1)).float(), \n", + " axes=[0, 0]).detach().numpy()\n", + "\n", + "plt.plot(t_train, global_var.tensor_cache.get(('u', (1.0,))), '*', color = 'k')\n", + "plt.plot(t_train, sobolev_pred_1ord, color = 'k')\n", + "plt.plot(t_train, global_var.tensor_cache.get(('du/dx0', (1.0,))), '*', color = 'r')\n", + "plt.plot(t_train, sobolev_deriv_1, color = 'r')\n", + "plt.plot(t_train, global_var.tensor_cache.get(('d^2u/dx0^2', (1.0,))), '*', color = 'b')\n", + "plt.plot(t_train, sobolev_deriv_2, color = 'b')\n", + "\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "id": "96b66618", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Using net Sequential(\n", + " (0): Fourier_embedding()\n", + " (1): Linear(in_features=21, out_features=128, bias=True)\n", + " (2): ReLU()\n", + " (3): Linear(in_features=128, out_features=128, bias=True)\n", + " (4): ReLU()\n", + " (5): Linear(in_features=128, out_features=128, bias=True)\n", + " (6): Tanh()\n", + " (7): Linear(in_features=128, out_features=1, bias=True)\n", + ")\n", + "solving equation:\n", + "0.0 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.00000043981936, dim: 0.0} + -0.9918493622898968 * d^2u/dx0^2{power: 1.0} + 1.4857876345926648 * t{power: 1.0, dim: 0.0} + -3.9448922927621832 * u{power: 1.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 2.0000002504454213, dim: 0.0} + -0.02862032765756233 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999996264795334, dim: 0.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.006608472868227343}}\n", + "cond: loc: tensor([[0.]]), operator: {'u': {'coeff': 1, 'u': [None], 'pow': 1, 'var': 0}}, value: tensor([[0.8000]])\n", + "cond: loc: tensor([[7.9500]]), operator: {'u': {'coeff': 1, 'u': [None], 'pow': 1, 'var': 0}}, value: tensor([[2.0686]])\n", + "Step = 100 loss = 0.000246.\n", + "Step = 200 loss = 0.000073.\n", + "Step = 300 loss = 0.000052.\n", + "Step = 400 loss = 0.000180.\n", + "Step = 500 loss = 0.000144.\n", + "Step = 600 loss = 0.002840.\n", + "Step = 700 loss = 0.000062.\n", + "Step = 800 loss = 0.000089.\n", + "Step = 900 loss = 0.000364.\n", + "Step = 1000 loss = 0.000043.\n", + "Step = 1100 loss = 0.000481.\n", + "Step = 1200 loss = 0.003222.\n", + "Step = 1300 loss = 0.000704.\n", + "Step = 1400 loss = 0.004115.\n", + "Step = 1500 loss = 0.000296.\n", + "Step = 1600 loss = 0.000137.\n", + "Step = 1700 loss = 0.000359.\n", + "Step = 1800 
+      "Step = 1900 loss = 0.000047.\n",
+      "Step = 2000 loss = 0.000187.\n",
+      "Step = 2100 loss = 0.001779.\n",
+      "Step = 2200 loss = 0.000397.\n",
+      "Step = 2300 loss = 0.004507.\n",
+      "Step = 2400 loss = 0.002870.\n",
+      "Step = 2500 loss = 0.000936.\n",
+      "Step = 2600 loss = 0.000087.\n",
+      "Step = 2700 loss = 0.001175.\n",
+      "Step = 2800 loss = 0.000116.\n",
+      "Step = 2900 loss = 0.000069.\n",
+      "Step = 3000 loss = 0.000101.\n",
+      "Step = 3100 loss = 0.000257.\n",
+      "Step = 3200 loss = 0.000109.\n",
+      "Step = 3300 loss = 0.000548.\n",
+      "Step = 3400 loss = 0.000713.\n",
+      "Step = 3500 loss = 0.000089.\n",
+      "Step = 3600 loss = 0.000791.\n",
+      "Step = 3700 loss = 0.001018.\n",
+      "Step = 3800 loss = 0.002033.\n",
+      "Step = 3900 loss = 0.000433.\n",
+      "Step = 4000 loss = 0.000604.\n",
+      "Step = 4100 loss = 0.001266.\n",
+      "Step = 4200 loss = 0.002940.\n",
+      "Step = 4300 loss = 0.003681.\n",
+      "Solved!\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "(... base64-encoded PNG plot data omitted ...)",
+      "text/plain": [
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "fitness error is 0.7620181982052235, while loss addition is 0.0009753287886269391\n" + ] + } + ], + "source": [ + "compiling_params = {'mode': 'autograd', 'tol':0.001, 'lambda_bound': 1e1} #\n", + "optimizer_params = {'params': {'lr': 1e-6}} # 'optimizer': 'LBFGS', 'params': {'lr': 1e-4}\n", + "training_params = {'epochs': 10000, 'info_string_every' : 1e1}\n", + "early_stopping_params = {'patience': 12, 'no_improvement_patience' : 2000}\n", + "\n", + "fitness.set_adapter(net = deepcopy(loaded_nn))\n", + "fitness.adapter.set_compiling_params(**compiling_params)\n", + "fitness.adapter.set_optimizer_params(**optimizer_params)\n", + "fitness.adapter.set_early_stopping_params(**early_stopping_params)\n", + "fitness.adapter.set_training_params(**training_params)\n", + "\n", + "fitness.apply(correct_eq, {'sparsity': {}, 'coeff_calc': {}})" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "39608b95", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAhUAAAGeCAYAAAAje/P4AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABL+UlEQVR4nO3dd3hU55k28PtM0ajPqIIaQhK9I6qNDcYW7o4bYCfGzibZiGz6pkDIfrspWwhkU9aJk4CTOMUNkGPcCzI22HQkIXrTCNRRnRnVqef7YzQDmKYyM++cM/fvunRdAeSZ5w3i6NZbnleSZVkGERER0TBpRBdARERE6sBQQURERAHBUEFEREQBwVBBREREAcFQQURERAHBUEFEREQBwVBBREREAcFQQURERAGhC+WbeTweNDQ0ICEhAZIkhfKtiYiIaIhkWUZnZycyMzOh0Vx7PiKkoaKhoQE5OTmhfEsiIiIKkNraWmRnZ1/zz0MaKhISEgB4i0pMTAzlWxMREdEQ2Ww25OTk+L+PX0tIQ4VvySMxMZGhgoiISGFutHWBGzWJiIgoIBgqiIiIKCAYKoiIiCggGCqIiIgoIBgqiIiIKCAYKoiIiCggGCqIiIgoIBgqiIiIKCAYKoiIiCggGCqIiIgoIBgqiIiIKCAYKoiIiCggQnqhGBER0WBZe52oqOlAclwURqfGITFaL7okugaGCiIiCjtuj4xXyurw5pFG7KlqhdMt+/9sZGI0vra4AE/My4VGc/1bMym0GCqIiCisdHQ78M2XK/DxmVb/741KjkWPw43WLjuabH3499eOYeuhBqx9ZCrGjUgQWC1diqGCiIjCxrEGK1b+vQx1Hb2I0WvxtcUFuGdqBgrS4gEAnX1O/KO8HuvfPYmy8x24/+lPsOGpWVg8Pl1w5QQAkizL8o0/LTBsNhuMRiOsVisSExND9bZERKQAp5o68fDvdqHH4cao5FhseHIWJmZc/XtFg6UXP/jHEew83YJovQZ/++I8zM1LDnHFkWOg3795+oOIiITrsrvwLy+UocfhxtzRyXj96wuuGSgAINMUgz99fjZun5COPqcHX/rLARytt4awYroahgoiIhJKlmWs+ccRmFu6MTIxGr9fUQhTbNQN/zu9VoPfPVGIuXnJ6LS78E/P7UdzZ18IKqZrYaggIiKhnt97Hm9UNkCrkfDbz81ESrxhwP9ttF6LP35+NiaMTEBrlwM//MdRhHBVnz6FoYKIiIRpsPTiv946AQD4wd0TMHv04PdFJEbr8evHZ0CvlVB64gJeKa8PdJk0QAwVREQkzC+3nYbd5cHc0cn451vzhvw6E0Ym4ttF4wAAP3njGBqtvYEqkQaBoYKIiIQ40WjDK+V1AIAf3jcRkjS8RlYrF+ZjRo4JnX0u/OCVI1wGEYChgoiIhPjZOychy8B90zIwI8c07NfTaTX4xfLpiNJpsON0Cz461TL8ImlQGCqIiCjkPjnTih2nW6DXSlh11/iAvW5BWjy+cPNoAMDad07A5fYE7LXpxhgqiIgopGRZxs/fPwUAWDE/F7kpcQF9/a8uHgNTrB6nL3ShpKwuoK9N1zesULFkyZJA1UFERBGiotaCyloLonQafH3xmIC/vjFGj2/cPhaAdyNoj8MV8PegqxtyqCgpKUFpaWkgayEiogjw3K5zAICHZmQOqifFYKyYPwo5yTFo7rTj2Z3VQXkPutKQQoXFYkF7e3ugayEiIpVrsvbhnSONAIDP9+99CAaDTotVd00AAPzpEzM6+5xBey+6aEihYvPmzVi+fHmgayEiIpV7Yd95uDwy5uYlY3KmMajvdd/UDBSkxcHW58JL+2uC+l7kNehQUVpaiqKiogF9rt1uh81mu+yDiIgiU5/TjRf3eb+5fyGIsxQ+Go2ErywqAAD88eNq2F3uoL9npBt0qLBYLMjPzx/Q565duxZGo9H/kZOTM+gCiYhIHd6obEBbtwOZxmgsmTQiJO/54IwsZBij0dxpx6ts3x10gwoVGzduxNKlSwf8+WvWrIHVavV/1NbWDrpAIiJSh5cPeL8HrLgpFzptaDoaROk0+OdbvT8Ib9hphtvDLpvBNOC/1fLycsyePXtQL24wGJCYmHjZBxERRZ6ath6Une+ARgIeLcwO6Xs/PicHplg9qlu78e7RppC+d6TRDfQT29vbUV5e7j9GWlVVBQBYv3498vPzBzWDQUREkWXrIe/Sw4IxqRiRGB3S944z6PDUTaPx9Adn8Odd1bhvWkZI3z+SSPIQb1wpLy/HrFmzBnVhi81mg9FohNVq5awFEVGEkGUZd/xiB8yt3fjFsul4dFZoZyoAoLmzDwt+th1Ot4y3vnlL0E+eqM1Av38PaVGrpKQEa9euBQCsXr2aTbCIiOiaDtdZYW7tRrReg7umjBRSQ3pCNO6e4p2heH7veSE1RIIB
L39caunSpVzuICKiAXm1wrv0ceekkYg3DOnbTkA8OT8Xb1Q2YGtFA35wz0QYY/TCalErXihGRERB43R78EZlAwDg4ZlZQmuZMzoJE0YmoNfpxiu8aCwoGCqIiChoPjnbirZuB1LionDr2FShtUiShBXzcwF4l0A8PF4acAwVREQUNG8d9t7zcf+0jJD1prieh2dmIcGgg7m1G7uqWkWXozri/4aJiEiVXG4PSk9cAADcMzU8jnHGGXR4pNC7DONrxkWBw1BBRERBsb+6HZYeJ5LjojBndLLocvyWzfZeGbHt2AVYehyCq1EXhgoiIgqK9455u1cWTUyHViMJruaiKVlGTMpIhMPtwWuHGkSXoyoMFUREFHCyLOP9496lj7smi+lNcT3LZnsbcG0p4xJIIDFUEBFRwB2us6LR2oe4KC0WjBF76uNqHpyRBb1WwtF6G4432ESXoxoMFUREFHC+pY/bxqcjWq8VXM2VkuOi/Nevc7YicBgqiIgo4N7tDxWi2nIPxLJZ3g2bWyvq4XB5BFejDgwVREQUUGebO2Fu6UaUVoPF49NEl3NNt45NxYhEAzp6nNh+8oLoclSBoYKIiAJq+8lmAMD8ghQkRIfv/Ro6rQYPzvD2rNhawVMggcBQQUREAbXztLdT5W3jwneWwufBGZkAvEHI2usUXI3yMVQQEVHA9Drc2H+uHQCwUAGhYlJGIsamx8Ph9uDdo42iy1E8hgoiIgqYvdVtcLg8yDLFoCAtTnQ5NyRJEh6aySWQQGGoICKigNl5ugUAsHBcKiQpfLpoXs9npnuXQPZWt6HJ2ie4GmVjqCAiooDxh4qx4b/04ZOTHIvZuUmQZeD1ynrR5SgaQwUREQVEXUcPqlq6odVIuDkMu2hez4NcAgkIhgoiIgoI36mPmTkmGGPC9yjp1dw/NQM6jYTjjTacbe4UXY5iMVQQEVFAXNxPoZylD5+kuCjcMtY7u/LW4SbB1SgXQwUREQ2by+3BrirvTIUSQwUA3Dc1AwDw9hEeLR0qhgoiIhq2yjorOvtcMMXqMTXLKLqcIblz0kjotRJOXejkEsgQMVQQEdGw7TW3AQDm56VAq1HGUdJPM8bq/de0cwlkaBgqiIho2Hyh4qaCFMGVDM+9XAIZFoYKIiIaFofLg4PnOgAoP1TcddkSSJfochSHoYKIiIalss6CXqcbKXFRGJseL7qcYbl0CYSzFYPHUEFERMOyp6p/P0V+imJac1+PbwnkrcMMFYPFUEFERMPiDxUKX/rwuXPSCOg0XAIZCoYKIiIasj6nG2U1/fsp8tURKkyxFxthcQlkcBgqiIhoyCpqLHC4PEhLMCjiqvOB4imQoWGoICKiIfP3p1DJfgof3xLIySYugQwGQwUREQ3ZHl9/CpUsffiYYqN4CmQIGCqIiGhI+pxuHKqxAFB+f4qruW8al0AGi6GCiIiG5FCtBQ63B+kJBoxOiRVdTsBdugRS1cIlkIFgqCAioiE5UN0OAJiTl6yq/RQ+ly2BsGfFgDBUEBHRkOw/5w0Vc0cnC64keHzXob/FJZABYaggIqJBc7k9KD/v7U8xN0+9oWLJpBHQ9i+BnGvtFl1O2GOoICKiQTveaEO3w43EaB3Gj0gQXU7QJMVF+U+2vHuM16HfCEMFEREN2n7fforRydBo1Lef4lJ3TRkJAHj3KEPFjTBUEBHRoO2/ZJOm2t01aQQkyXvapdHaK7qcsMZQQUREgyLLMg74NmlGQKhIT4zGrFFJAID3OFtxXQwVREQ0KGebu9DR40S0XoMpmUbR5YTE3b4lEO6ruC6GCiIiGhTfUdLCUUmI0kXGt5G7JntDxf7qdrR12QVXE74i46uBiIgC5tJNmpEiJzkWU7IS4ZGBbccviC4nbDFUEBHRoBw8p/7+FFdzzxRvI6x3uK/imhgqiIhowJqsfai39EIjATNyTKLLCSnfEsjuqlZYe52CqwlPDBVERDRg5TXeWYqJGYmIM+gEVxNaY9LjMTY9Hk63jA9PNosuJywxVBAR0YCV9bfmLuw/YhlpfKdA3jnKu0CuhqGCiIgGzBcqZuVGZqjwLYHsON2CHodLcDXhh6GCiIgGpM/pxrEGK4DIDRWTMxORkxyDPqcHO061iC4n7DBUEBHRgBytt8LplpEab0B2UozocoSQJAl3T2YjrGthqCAiogG5uPRhgiSp+xKx67m7/2jp9hPNsLvcgqsJLwwVREQ0IJG+n8JnZo4JIxIN6LS7sPtsm+hywgpDBRER3ZAsyyivsQCI3JMfPhqN5N+wyevQL8dQQUREN1Tb3ovWLjv0WglTsiLjErHr8e2reP94E1xuj+BqwgdDBRER3VBZjfe+jylZRkTrtYKrEW9uXjKSYvXo6HH6L1gjhgoiIhqASG969Wk6rQZLJo0AALzHJRA/hgoiIrqhQ7UWAAwVl/J113z3WBM8HllwNeGBoYKIiK6rz+nGycZOAMCMUSaxxYSRBWNSEW/Q4YLNjkN1FtHlhAWGCiIiuq5jDVa4PN6mV5nGaNHlhA2DTovbJ6QD4CkQH4YKIiK6ror+o6QzciK76dXV+JdAjjZBlrkEwlBBRETX5dtPMZNLH1e4bXwaDDoNatp7cLzRJroc4RgqiIjouir79wtMzzYJrSMcxUbpsGhcGgCeAgEYKoiI6Drauuyobe8FAEzLYdOrq7lnKi8Y82GoICKia/ItfRSkxSExWi+2mDB1+4QR0GslnL7QhaqWLtHlCMVQQURE11TZHypm5LA/xbUYY/S4uSAVAE+BMFQQEdE1VfhCBTdpXtelp0AimW4wn2yxWLB582YAQFVVFcxmM5599lmYTKZg1EZERALJsnxxpoKbNK9ryaQR+LdXj+BIvRV1HT3ITooVXZIQg5qpWL16NYqKilBcXIx169YhOTkZy5YtC1ZtREQkUHVrN2x9Lhh0GkzISBBdTlhLjTdgzuhkAMB7xy4IrkacQYUKs9mMkpIS/68LCgpw8ODBgBdFRETi+TZpTskyQq/lavmNXFwCaRRciTiDWv7Ytm3bZb8+cOAAioqKAloQERGFB9/SB/tTDMxdk0fiJ28cx8HzHWju7EN6QuS1NB9y9CwpKYHFYsGzzz57zc+x2+2w2WyXfRARkTIc4ibNQck0xWB6jgmyDLwfoUsggw4VFosFGzduhNlsxrJly667SXPt2rUwGo3+j5ycnOHUSkREIdLndPvbTs/MMYktRkHu6V8CeS9CG2ENOlSYTCYUFxdj1apVAICkpCRYLJarfu6aNWtgtVr9H7W1tcMqloiIQuNEow1Ot4zkuChkJ8WILkcx7prsDRV7qtpg6XEIrib0BhwqLBYLVq9efVmAKCoqgsViQWlp6VX/G4PBgMTExMs+iIgo/PmXPngz6aDkpcZhwsgEuDwySk80iy4n5AYcKsxmM9avX4/29nb/7/kCBvtUEBGpy6WhggYnkhthDThUFBYWYtWqVcjPz/f/3qZNm1BYWMgTIEREKuM/+cFQMWi+ULHzTAu67C7B1YTWoI6UrlmzBuvXr/f/2mKx4IMPPgh4UUREJE5HtwP
n2noAANOzeTPpYI0fkYC81DhUt3bjo1PNuH9apuiSQmZQocJkMvk3aBIRkTpV1lkAePcHmGKjxBajQJIk4a7JI/GHHVV452hTRIUKtkgjIqLLcD/F8PmWQD482Yw+p1twNaHDUEFERJdhqBi+6dlGZJli0ONw48OTkXMKhKGCiIj8Lr2ZlJs0h06SJNw/PQMA8MbhBsHVhA5DBRER+dW096Cjx4korQYTeTPpsDzQv5di+8nmiDkFwlBBRER+vqWPSZmJMOi0YotRuMmZichLjUOf04MPTkTGXSAMFURE5Mf9FIEjSRIemNa/BFIZGUsgDBVEROTHUBFY90/3LoHsON0Ca49TcDXBx1BBREQAAIfLg2MN3ptJGSoCY9yIBIwfkQCnW8Z7x9XftpuhgoiIAAAnm2xwuDwwxeqRmxIruhzVuL9/CeTNw42CKwk+hgoiIgJwceljejZvJg0k3xLIrrOtaOuyC64muBgqiIgIAPdTBEteahymZCXC7ZHx7jF1L4EwVBBFALdHxgVbHxwuj+hSKIwxVASPr2eF2k+BDOpCMSJSjhONNvx2+1kcrreg0dIHl0dGbJQWc0Yn45YxqXikMAsp8QbRZVKYsPY4YW7pBsBOmsFw37QMrH3nJPZVt+OCrQ8jEqNFlxQUnKkgUpna9h58++UK3Pv0x3jrSCNq23vh8sgAgB6HGztOt+C/3z6Bol/uwKsVdZBlWXDFFA4O11sAAKOSY5Ecx5tJAy07KRaFo0yQZeDtI+rdsMmZCiIVKTvfgX96bj86+7wtge+floEV83ORmxKLtHgDzjR3YdfZVpSU1eFkUyf+dVMltlY04H+XTUdaAmctItmhGgsALn0E0/3TMlFeY8EblQ34woI80eUEBWcqiFRi99lWPPmnfejsc2F6jglvfP0W/PZzhZifn4IMYwx0Wg0mZiTin2/NxxvfuAXfv2s8onQa7Djdgsc27kGTtU/0EEgg7qcIvvumZUCSgPIaC+o6ekSXExQMFUQq8OGpZvzTXw6gx+HGrWNT8dKX52FqtvGan6/XavC1xWPw9jdvQZYpBuaWbizfsEe1Dzq6PlmWUVlnAcD9FME0IjEac0cnAwDeUmnPCoYKIoU739aNb7xYAYfLg6KJI/DsU7MRGzWwlc0x6QnYtHI+clNiUdPeg8c27EWjtTfIFVO4qevoRWuXA3qthMmZiaLLUbUH+ntWbD2kzlMgDBVECuZwefDNlyrQZXdhzugk/H5FIaL1g7tZMjspFpuKb0J+WhzqLb34yvPlsLvcQaqYwpFvlmJiRuKgv35ocO6bmgG9VsKJRhtONtlElxNwDBVECvaL90+hss4KY4we//f4TOi1Q/snPdIYjb9+YS6MMXpU1lrw49ePBbhSCme+TZrTs01C64gESXFRWDw+HQDwanm94GoCj6GCSKF2nm7Bhp1mAMD6pdOQaYoZ1uvlJMfi6c/OhCQBL+2vxUv7awJRJikAN2mG1iOFWQCArYfq4fao60g3QwWRAjlcHvzHa0cBAE/Oz8Vdk0cG5HUXjUvD9+4cDwD40WvHcPpCZ0Bel8KX0+3B0QYrAGDGKJPYYiLE4gnpMMboccFmx56qNtHlBBRDBZEC/X3veZxr60FqvAGr75kQ0Nf+l0UFWDw+DQ63B6tKDqvuJym63KmmTvQ5PUiI1iEvJU50ORHBoNPivv6bS1+tUNcSCEMFkcJYehx4+oMzAIDv3jkO8YbA9rDTaCSsfWQaEgw6HKq14Lld1QF9fQovly59aDS8mTRUHpnpXQJ592gjeh3q2RjNUEGkML/ZfhbWXifGj0jA8tk5QXmPkcZo/Nt9EwEAP3/vFM61dgflfUi8Su6nEGJWbhJGJcei2+HGeyq6uZShgkhBzrd14297zgEAfnjfRGiD+JPlY3NysGBMCuwuD37wj8O8I0SlfDMVPPkRWpIk4eH+2YqSsjrB1QQOQwWRgvxm+1k43TIWjkvDonFpQX0vSZLws0emIVqvwV5zu6p+miKvzj4nzrZ0AeAmTRGWzsoGAOyqakVtuzq62TJUEClEo7UXrx3ybur616KxIXnPnORYFN+aDwD42Tsn4XB5QvK+FBpH6qyQZSA7KQap8bxQLtRykmOxYEwKZBl4pVwdsxUMFUQK8edPquF0y5ibl4yZo5JC9r4rFxUgNd6Ac209eH7v+ZC9LwVfhW/pg/sphPHti9pysA4eFZy0YqggUgBrrxMv7vM2o/qXRQUhfe84gw7fvXMcAODp7Wdg7XGG9P0peHybNGcyVAhz1+SRSIjWod7Siz1m5fesYKggUoDn955Ht8ONCSMTcNv44O6luJpls7IxbkQ8LD1O/PbDMyF/fwo8WZYvbtJkqBAmWq/FgzO8l4xtPlgruJrhY6ggCnN9Tjee23UOALByUT4kKfS9BHRaDdbc6z1i+rc959Fs6wt5DRRYjdY+NHfaodNImJJpFF1ORPMtgbxztEnxM4EMFURh7s3DjWjtsiPTGI37p2UKq+O2cWmYlZsEu8uD3++oElYHBUZF/yViEzMSERPFm0lFmpplxISRCXC4PHi1QtkbNhkqiMLcpgPevRRPzM8d8i2kgSBJEr7df+rkxX01nK1QuIqaDgDATB4lFU6SJHx27igAwAv7ahTdE4ahgiiMnW3uxIFzHdBqJCzrP9Mu0i1jUjGbsxWq4Dv5wVARHh4uzEKMXoszzV3YX90uupwhY6ggCmObDng3bt0+IR3pidGCq/HNVnhPgnC2QrkcLg+O1HtvJp2ZE7rjyXRtidF6/4bNF/pPeikRQwVRmHK4PHil3Nvs6vE5wbnjYygWjEnxz1b8YYdZdDk0BCcabXC4PEiK1SM3JVZ0OdRvxfxcAMA7R737qJSIoYIoTG07fgHt3Q6MSDQEvSX3YEiShG/e4d1b8fKBGlh6HIIrosEq9++nSBJymoiubkqWEdNzTHC6ZcUeL2WoIApTL/dv0Fw2Kwc6gRs0r+bWsamYlJGIHoebXTYVyHfyg02vws8T87wbNl/cV6PIDpvh9aQiIgBAvaUXn5xtBeC9LTTcSJKElYu8d4L8Zfc59DndgiuiwaiovThTQeHlgWmZSIzWoa6jF9tPNosuZ9AYKojC0JuVDZBlYF5eMnKSw3PN+96pGcgyxaC1y6Gay5AiQUunHbXtvZAkYFoOm16Fm5gorf946Z93VQuuZvAYKojC0OuVDQCAz8wQ1+zqRvRaDb50Sx4A4NmdZrgVOFUbiXytucemxyMxWi+2GLqqp24eDa1Gwu6qNhxvsIkuZ1AYKojCTFVLF4412KDTSLhnSobocq7rsTk5MMboca6tB9uON4kuhwbA3/SKR0nDVpYpBndPGQkAeE5hsxUMFURh5o3+WYpbxqYiOS5KcDXXF2fQYcV871St734SCm/+TZpsehXWvrjAOwv42qEGRR0vZaggCiOyLPtDxQMC7/kYjBXzc6HVSNhX3Y6TTcqaqo00bo+MyjoLAG7SDHezcpMwI8cEh9ujqBNWDBVEYeR4ow1VLd2I0mlw5+QRossZkAxjDO7qr/Wvu5Xz8I
tEpy90osfhRrxBhzHp8aLLoRv4Yv+epef3nlfMCSuGCqIw8kZlIwDg9vHpSFDQJrqnbhoNANhaUa/4q5vVzLf0MT3HCK2GTa/C3T1TRvpPWG1RSDMshgqiMCHLMt483L/0MV0ZSx8+8/KSMX5EAnqdbmwpU8bDLxJxk6ay6LUafz+YP+www+n2CK7oxhgqiMLE8UYb6jp6Ea3X4PYJ6aLLGRRJkvD5m0cDAP6+97wiOwFGAt/NpIW5JqF10MAtn52D1Pgo1Ft68dqhBtHl3BBDBVGY+OCEt3veLWPSEBOlFVzN4D00MxMJ0Tqcb+vBjtMtosuhT7H2OnG2uQsAMIMzFYoRrdfiS7d4Zyt+99HZsO8Hw1BBFCZKT1wAACyZpKxZCp/YKB2Wz/a2FP/rnnNii6ErVPbPUoxOiQ37o8p0uRXzRyExWgdzSzfeOxbe/WAYKojCwAVbHw7XWQEAixW29HGpJ+fnQpKAj0614Fxrt+hy6BIX+1NwlkJpEqL1+Kf+5cXfbD8b1suLDBVEYcC39DEjx4T0hGjB1Qzd6NQ43NZ/Tfvf9vB4aTi5eImYSWwhNCRfWJCHBIMOJxptePNIo+hyromhgigMfOBf+lBGb4rrear/J6otZbXotrvEFkMAvCeLLl53zpkKJUqKi8KXF3r3Vvzi/VNhexKEoYJIsF6H23/N+R0Tlbv04bNobBpyU2LR2efC1kP1osshAObWblh7nTDoNJiQkSC6HBqiL92Sh5S4KJxv68HmMO1bwVBBJNgnZ1thd3mQnRSD8SOU/8DXaCQ8OT8XAPC33echy+G7/hspfLMU07KN0Gv52FeqOIMO37h9DADg/0rPoNcRfl02+dVFJFjpce/SR9HEEZAkdXQ5XDY7BzF6LU5d6MT+6nbR5UQ8f9MrbtJUvM/OG4XspBg0d9rx3O7wu8GUoYJIIFmW8eEp7yZNNSx9+Bhj9PhMf1fQl/bXCK6Gys77OmmaxBZCw2bQafGdJeMAAM9sP4sLtj7BFV2OoYJIoJNNnWjutCNar8Gc0cmiywmoz83zXon+9tEmdHQ7BFcTuay9Tpy60AkAmK2yr7FI9dCMLMwcZUK3w421b58QXc5lGCqIBNrZ33lyfn4KovXK66J5PdOyjZiUkQiHy4NXyutElxOxyms6IMvepldpCQbR5VAAaDQSfvqZKZAkYOuhBuwzt4kuyY+hgkignWe8oWLh2DTBlQSeJEn+2YqX9tdww6YgZee8Sx+cpVCXqdlGfHau99/Xj14/BleYHDFlqCASpMfhwoFq7wN/4Tj1hQoAeHBGJmKjtKhq6eaGTUEOnPP+/z47l5s01eb7d46HKVaPk02d+Mvuc6LLAcBQQSTMPnM7HG4PskwxKEiLE11OUCREc8OmSA6XB4f67/zgTIX6JMVFYdVdEwAA//v+KVSHQWt8hgoiQXw3eS4cl6qao6RX45ui5YbN0DvWYIXd5UFSrF61wTXSfXZuDm4uSEGf04NVJZXC7wVhqCASRM37KS41LduIyZncsCnCwf79FLNyk1UdXCOZJElY9+g0xEVpceBch/BlEIYKIgHqOnpgbumGViPh5jGpossJKkmS/LMVL3LDZkj59lPMGc39FGqWkxyLNfdOBACsf++k0GWQQYeK9evXY/369Vi5ciVWrlwZjJqIVG/nae9dHzNyTDDG6AVXE3y+DZtmbtgMGVmW/U2vZjNUqN4T80b5l0HeOSruFlPdYD559erVWLdunf/XK1euxJIlS7Bt27aAF0akZr7+FGpf+vDxbdh8+UAtXtxfg3n5KaJLUr3q1m60dTsQpdNgSpZRdDkUZL5lkBONNtw5eaSwOgY8U2GxWFBeXg6LxeL/vZUrV6K0tBRmszkYtRGpksvtwa4q70zFwnHqXvq4lK9nxTtHuGEzFHz7KaZnG2HQqauxGl1dTnKs0EABDHL54+DBg5cFiPx8793ulwYNIrq+Q7UWdPa5YIrVY1q2SXQ5ITM1q3/DppsbNkNhv68/BY+SUggNOFSYTCZ0dHSgsLDQ/3ulpaUALoYLIrox39LHgjGp0GoiZ0f+pR02uWEz+PZVe1s3z8tjqKDQGdbpj7Vr12LDhg0wmUxX/XO73Q6bzXbZB1Gk23HGu/SxKEL2U1zqwRlZ/g2bB/qn5ynwGiy9qG3vhVYjcaaCQmrIoWL16tV47LHHUFxcfM3PWbt2LYxGo/8jJydnqG9HpAod3Q4crrMAAG6NoP0UPvEGHR6Y5u2w+TI7bAaNb5ZiSmYi4g2D2o9PNCxDChUlJSUoKCjAqlWrrvt5a9asgdVq9X/U1tYOqUgitfjkbCtkGRg3Ih4ZxhjR5Qjx+FzvDxdvHWmEtccpuBp12mf27qeYy6UPCrFBhwrfPgrfDIXFYrnm6Q+DwYDExMTLPogiWaQdJb2aGTkmTBiZALvLg1cruGEzGHy9QObl8eguhdagQkV5eTnKy8tRWFgIs9kMs9mMjRs3IjmZaZjoRmRZvtiaW6W3kg6EJEl4fI53tuLlA7XcsBlgzbY+mFu7IUnAHM5UUIgNeLHNYrHgjjvugMViwerVqy/7sxstgxARcPpCFy7Y7DDoNBE/Lf3wzGysfeckTjZ14lCtBTNHseNjoOzrn6WYODIxIrq1UngZ9JFSWZav+CCiG/MtfczLT0G0PrKbERlj9bhvagYA4OX93GsVSP6jpPmRHVxJDF4oRhQiF28ljbxTH1fzeP8lY28cbkCX3SW4GvXwbdLkfgoSgaGCKAR6HW7/tPSiCN5Pcak5o5NQkBaHHocbrx9qEF2OKrR12XGmuQsAT36QGAwVRCGwr7oNDpcHGcZojEmPF11OWPBu2PTOVrzEnhUB4bvqfNyIeCTHRQmuhiIRQwVRCPiuOl84Ng2SFDmtuW/kkcIs6LUSjtRbcbTeKrocxdt11ruf4ibeAkuCMFQQhQCPkl5dSrzBf6viywc4WzFcvttvbx7DfTskBkMFUZA1WHpxtrkLGgm4hQ/7K3y2fwnktYoG9Di4YXOomqx9MLd0QyMB8zlTQYIwVBAFme8o6fQcE4yx7BvwaTcXpGBUciw67S68ebhRdDmKteusd5ZiSpaR/SlIGIYKoiC7eJSUSx9Xo9FI/vtAXtzHJZCh8i99FHA2jMRhqCAKIpfbg0/6rzrnfoprWz47B3qthEO1Fm7YHAJZlrG7f5PmgjFc+iBxGCqIgqiyzgpbnwuJ0TpMzzaKLidspcYbcPcUb4fNF/adF1yN8phbu9Fk60OUVoPZuexPQeIwVBAF0e6zF6ekdVr+c7ueJ+Z5N2xurWiArY9Xog/G7irvLEVhrgkxUZHdAp7E4lOOKIj2mL0P+5s5JX1D8/KSMTY9Hr1ON7ZW1IsuR1F84XUB91OQYAwVREHS53Sj7HwHAO8JB7o+SZL8sxXP7z3PywoHyOORLwmvDBUkFkMFUZBU1Fhgd3mQlmBAQRpbcw/Ew4XZiNFrcfpCFw6c6xBdjiIca7DB0uNEXJQW07hvhwRjqCAKEt9Pjzflp7A19wAZY/T4zPRMANywOVAfnWoG4J2l0HPfD
gnGr0CiINnT3zfgJi59DMqK+bkAgHeONKGtyy64mvD3UX9ztcXj0wVXQsRQQRQUPQ4XDtVaAHA/xWBNzTZiWrYRDrcHmw/WiS4nrFl6HKio8S4T3TaefVBIPIYKoiA4eK4DTreMTGM0RiXHii5HcVbM885WvLj/PDwebti8lp1nWuGRvVedZ5piRJdDxFBBFAz+/RQFqdxPMQQPTM9EQrQOte29/jbndCXffgoufVC4YKggCgJfMyLupxiamCgtHi3MBgA8v5f3gVyNxyP7L6tbxKUPChMMFUQBZutz4kidBQBDxXCsmO/tWbH95AXUdfQIrib8HGuwobXLgbgoLVtzU9hgqCAKsAPV7fDIQG5KLLK4zj1kY9ITcHNBCjwy8Pc9PF76aR/2L30sGJOKKB0f5RQe+JVIFGB7+pc+eOpj+L64IA8A8NL+GvQ4XIKrCS/+/RQTuJ+CwgdDBVGA+TZpzs9nqBiu2yekIzclFrY+F14p530gPu3dDv+R5UXjuJ+CwgdDBVEAWXocON5oA8D9FIGg0Uj4ws2jAQDP7arm8dJ+2082wyMDkzISeZSUwgpDBVEA7TW3Q5aBMenxSE+IFl2OKiydnYMEgw7mlm7s4PFSAEDp8QsAgKJJIwRXQnQ5hgqiAPK15uZ+isCJN+jw2JwcAMCfP6kWXI14fU63v3fHnQwVFGYYKogC6NJLxChwPn/zaGgk4OMzrTh9oVN0OULtqWpDj8ONkYnRmJyZKLocosswVBAFSEunHacvdAHgJs1Ay0mOxZL+n8qf23VObDGCbTvhW/pIZ7dWCjsMFUQBsrd/lmJiRiKS4qIEV6M+vuOl/yivQ0e3Q3A1Yng8Mj7oDxVLJo0UXA3RlRgqiALE35qbsxRBMTcvGZMzE2F3efDSgchs3X2k3ooLNjviorSYn88umhR+GCqIAsQ3U8FNmsEhSZJ/tuJvu8/D6fYIrij0SvtnKRaNT4NBpxVcDdGVGCqIAqDR2ovq1m5oJGAuf4IMmvunZyA13oAmWx/eOdokupyQe/+Yb+mDpz4oPDFUEAWArzX31CwjEqP1gqtRL4NOiyfn5wIA/vSxGbIcOc2wzjZ34tSFTui1Em4fz1BB4YmhgigAfKFiPpc+gu6J+aNg0GlQWWf1H+GNBG9UNgIAbh2bBmMsgyuFJ4YKogDY7b9ELFVwJeqXGm/wN8P6ww6z4GpCQ5ZlvHm4AQBw/7QMwdUQXRtDBdEw1bb3oN7SC51GwuzcJNHlRIQv35oPrUbCztMtOFpvFV1O0J1s6kRVSzeidBrup6CwxlBBNEy7+1tzz8gxIc6gE1xNZMhJjsUD/T+x/2FHleBqgu+NSu8sxW3j0pDAPTsUxhgqiIbJt5+Ct5KG1spFBQCAt4804lxrt+Bqgse79OHdT3H/9EzB1RBdH0MF0TDIsnyx6RVDRUhNzEjE4vFp8MjAhp3q3VtxpN6KmvYexOi1KJqYLrocoutiqCAaBnNrN5o77YjSaVA4ivspQu1ri8cAAErKalFv6RVcTXD4Zilun5iO2Cgur1F4Y6ggGgbfLMWsUUmI1rPDYajNHp2MmwtS4HTL+P1HZ0WXE3AutwevVtQDAB6YxqUPCn8MFUTDsJdLH8J9646xAIDNB+rQaFXXbMWO0y1o6bQjOS4Kt0/g0geFP4YKoiGSZRn7qtsBMFSINC8/BfPykuFwe/D7j9R1EmTzwVoAwMMzsxCl4+Oawh+/SomG6HxbD1q77IjSajA1yyi6nIj2rSLvbMXL+2vRZO0TXE1gtHbZ8cGJZgDA8tk5gqshGhiGCqIhOni+AwAwNdvI/RSC3ZSfgrmjvbMVz3yojr0VWyvq4fLImJ5txPiRCaLLIRoQhgqiISo77136YBdN8SRJwnfuHAcAeGl/DWraegRXNDyyLGPTAe/SxzLOUpCCMFQQDdHBc96ZilkMFWFhfn4KFo1Lg8sj45fbTokuZ1gq66w409wFg06DB9jwihSEoYJoCCw9Dpxp7gLAUBFOvn/XeADAa5UNON5gE1zN0L247zwA4O4pI2GMYVtuUg6GigHoc7pxuM6Cl/fX4OX9Ndh9thV1HT2QZVl0aSRIeY13liI/NQ4p8QbB1ZDPlCwj7p+WAVkG/vd9Zc5WtHbZsfWQ966Pp27KFVwN0eCwPds1yLKMPeY2PPPhWew1t8PtuTJA5KfG4Yn5uVhamA1jLH+aiCRc+ghf371zPN452oTtJ5ux19yG+fnKOu774r4aOFweTM8xsUsrKQ5nKq6ioqYDj23Yi889uw+7zrbB7ZGRHBeFW8emYtG4NOSlxkGvlWBu7cZ/vnkc89aW4ukPzsDl9ogunULEd/Jj9mg+9MNNXmocHp/j3dz4n28ev+oPBOHK7nLj73u9Sx9fXDAakiQJrohocDhTcQlZlvH8vhr85PVjcHlkRGk1eHxuDr50Sx5GJcde9g+8y+7C1op6PL/3PE42deKX205j+8lm/OqxGchLjRM4Cgo2h8uDyloLAGBWbrLYYuiqvrNkHF6vbMCxBhtKymrx2JxRoksakLcON6Kl044RiQbcOzVDdDlEg8aZin59TjdWv3IY/771KFweGfdOHYmdqxbjpw9OQW5K3BU/McQbdFgxPxfvfOtW/PqxGUiI1uFQrQX3/t/HePdok6BRUCgca7DC7vIgKVaPgjQGyHCUEm/wt+/++Xun0NnnFFzRjcmyjD99Ug0AeOqm0dBr+Xgm5eFXLQCn24OvPF+GzQfroJGANfdMwDOfK8RIY/QN/1tJkvDQzCy89+2FuCk/Bb1ON772YjleKasLQeUkQtn5i/spOD0dvp66aTTyU+PQ2uXAb7eHf0OsfdXtONZgQ7Reg8/NVcbMCtGnRXyokGUZa/5xBB+dakG0XoO/fGEuVi4qGPQ3i0xTDP7+pblYNisbbo+M726pxF93nwtO0STUgXPepldc+ghvUToN/v3+SQCAP++qxtn+I8Dh6telpwEAS2dlIykuSnA1REMT8aHiF++fRklZHbQaCc98rhALx6UN+bV0Wg3WPToNX1gwGgDwo9eP4aX9NQGqlMKBxyNjr9kbKublM1SEu8UT0nH7hHQ43TJ++I8j8ITpps3dVa3Ya25HlFaDr942RnQ5REMW0aHitUP1+G3/PQH//dAU3DFxxLBfU6OR8B/3T8JXbysAAPy/rUfx4anmYb8uhYfjjTZYe52IN+gwjZeIKcJPH5yMGL0W+8+1Y1P/rZ/hRJZl/Gqbd5bis3NzkGmKEVwR0dBFbKho6bTjR68fAwB8ffEYPB7ANUxJkvD9u8bj0ULvUsjXXijH0XprwF6fxNlrbgMAzM1Lho4b6RQhOykW3+2/F+R/3j6BZlt43WL6ydlWHDjXgSidBl9dzFkKUraIfSr++I1jsPQ4MTkz0X9tciBJkoS1j0zFgjEp6HG48cW/HEBzZ3g9zGjwdld5Q8VNCmuoFOm+sCAP07KN6Oxz4cdvHBNdjp8sy/hl/yzFinm5GJF4483hROEsIkPFe8ea8NbhRmg1EtY9Oi1oR7eidBr8fsUsjE2PR3OnHd/d
XBm2a7p0Yy63B/urvfspbipgqFASrcYb8rUaCW8facI/ysPjdNbbR5pQUWNBtF6Dr9yWL7ocomGLuFBh63Pi37ceBQAUL8zHlCCviydG6/G7JwoRrdfg4zOt2LDTHNT3o+A5Um9Fl90FY4wekzISRZdDgzQ50+jvXfHvW4/ifFu30Ho6+5z46ZveWZPihQVIT+AsBSlfxIWKP3xUheZOO/JS4/wPmGAbOyIBP/nMZADeS458fQ5IWXxLH/Pzk6HRsD+FEn1t8RjMHZ2Mbocb33z5EJwCW+v/atsZXLDZkZsS69/YTaR0ERUqmjv78NyucwC8Da6i9dqQvffy2Tl4YHom3B4Z33q5Al12V8jemwJjT3+ouLkgVXAlNFRajYRfPT4DidE6VNZahN1keqzBir/s9nbP/OmDU0L6LCIKpogKFb/dfha9TjdmjjJhyaThHx8dDEmS8D8PT0F2UgzqOnqx9u0TIX1/Gh67y+1venUz91MoWpYpBusenQYA2LDDjDcqG0L6/i63B/9v61F4ZOC+qRlYNIzeOEThJmJCRW17j78R1ffvGi+kvXJCtB7rl3ofZi/sq8Gus60hr4GG5lCNBXaXB6nxBoxJjxddDg3TPVMzULzQuzHye1sqcbjOErL3/nXpGVTUWBBv0Pk7fhKpRcSEil9tOw2nW8atY1OFTl/fXJCKJ+fnAgBWlRzmMohC+I+SFqTwvg+VWH33BCwenwa7y4Piv5WFpH/FjtMteOYjb8O9/3lk6oDuFyJSkkGFCovFgo0bN2LJkiXBqicozjZ34tVD9QC8sxSi/eCeCchJjkG9hcsgSnFxPwWXPtRCq5Hw9GdnYmx6PJpsfXjqz/vR3u0I2vs1Wnvxr5sOQZaBJ+aNwmemZwbtvYhEGXCoKC8vx+bNm2GxWNDe3h7MmgLudx9WQZaBJZNGYFq2SXQ5iDPo/Gu6L+yrQXkNT4OEs16HGxW13r8jNr1Sl4RoPf74+dlITzDgZFMnVvxxHyw9gQ8WPQ4XvvpCOdq7HZiUkchlD1KtAYeKwsJCFBcXIz9fWQ1aatp68Fr/Rqxv3B4+LXBvLkjFo4XZAIB/e/UoXAKPttH1HTzfDqdbRqYxGrkpsaLLoQDLTYnDi1+ej9R4A4432rDiT4ENFnaXGyv/XoaKGgsSo3V45olCnvYg1VL9norf76iC2yNj0bi0sJiluNQP750AY4weJxpt+Oue86LLoWu4uJ8ilfspVGpMejxe+vI8pMRF4Wi9DQ8+swunL3QO+3Vdbg+++VIFPj7TitgoLZ77wlzkpcYFoGKi8BTUUGG322Gz2S77CKVGay9Kyry3En49jGYpfFLiDVh99wQAwC/fP4UmK+8GCUd7LtmkSeo1dkQCXiqej+ykGJxv68FDz+zCu0cbh/x6Hd0OfOX5Mrx37AKitBpsfHI2ZuUmBbBiovAT1FCxdu1aGI1G/0dOTk4w3+4KG3aY4XTLmJeXjDmjk0P63gP1+JwczMgxodvhxn9z02bYsfU5/ccNGSrUb9yIBLz+9Vtwc4H3IsCvPF+Ob75UgUZr76BeZ3dVK+7+v50oPdEMvVbCbz43E7eMZdM0Ur+ghoo1a9bAarX6P2pra4P5dpex9jjx8gFvX4pv3B6adtxDodFI+K+HpkCSgDcqG/wXVlF4OFDdDo8MjE6JRZYpRnQ5FALJcVH42xfnonhhPiQJeL2yAbf/7w788v1TqGnrue5/W1HTga+/WI4n/rgPF2x25KfF4dWvLsBdk0eGqHoisXTBfHGDwQCDwRDMt7imkvI69Dk9mDAyAQvGhPdPmFOyjHh8Tg5e2l+Ln7xxDK9//RZoebdEWODSR2TSaTX44b0T8Znpmfjx68dw8HwHnt5+Fk9vP4tp2UbcVJCCEQnRSE0woKPbgaqWLhyqteBwndX/Go/PycF/PDAJsVFBfcwShRVVfrXLsowX9no3Pq6Yn6uIzXXfvXM83qxsxLEGG0rKavHYnFGiSyJcvkmTIs+ULCO2fOUmvHWkES/tr8GeqjYcrrNeFh4uFaXV4DMzMvGFBaMxOTO4NyAThaNBhwol9KjYXdUGc2s34g06PDQzS3Q5A5Iab8C3isbiv946gZ+/dwr3TM1AYrRedFkRraPbgeON3s3F7E8RuSRJwv3TMnH/tEy0dtnx3rEmnG3uQnOnHS2ddhhj9ChIi8eY9HgsHJfKK8wpog04VJjNZpSUlGDTpk0oLy/H6tWrMWfOHCxdujSY9Q3J8/2zFA/PzEK8QTmTMU/dNBov7q+BuaUbv/uwCj+4Z4LokiLaXrN3lmJsejzSEsQs41F4SY034Il5uaLLIApbA96omZ+fj1WrVqGsrAyyLGPdunVhGSiarH14//gFAN6lDyWJ0mnww3smAgCe21WNBsvgdpxTYO08473wbcEYLn0QEQ2E6ppfvXygBm6PjLmjkzF+ZILocgbtjonpmDs6GXaXB78uPS26nIglyzJ2nm4BAF5NTUQ0QKoKFW6PjE0HvMdWn5ivzI2OkiThB/d6lz1KyuoC0tWPBq+6tRv1ll7otRLm5YdnjxMionCjqlDxydlWNFr7YIzRK/pceOGoJNwzZSQ8MrDunZOiy4lIH/cvfczOTeaRQCKiAVJVqNhy0DtL8eCMTMVf2PO9u8ZDq5Hwwclm7OvfMEih8/EZ79LHreO4n4KIaKBUEyqsPU7/Bs3ls0PbDjwYCtLi8fgc7zh+9u5JyLIsuKLI4XB5/E2vFo7lfgoiooFSTah4vbIeDpe3g+bkzETR5QTEt4rGIkavRUWNBe8ebRJdTsQor+lAt8ONlLgoTMpQx9cSEVEoqCZUbCmrAwAsm52jiA6aA5GeEI0v35oHAPj5e6fgdHsEVxQZfEsft4xNhYbt0omIBkwVoeJUUycO11mh00h4aEam6HICqnhRAVLiomBu7fafbKHg8m3S5NIHEdHgqCJU+DZo3jExHSnx6up8GG/Q4Zt3eG9Z/XXpGXTbXYIrUrf2bgeO1HvvdbiVV1UTEQ2KKkJFW7cDkgQsm6X8DZpX89m5o5CbEovWLjue21UtuhxV23G6GbIMTBiZgPRE3uFARDQYqggVv3psBj5ZfTsWjVfndHWUToPvLBkHANiww4yObofgitSr9EQzAKBo4gjBlRARKY8qQgUAZJlioNeqZjhXeGBaJiZmJKLT7sLvd1SJLkeVnG4Pdp7ybtK8fWK64GqIiJRHvd+FVUajkbDq7vEAgL/sPodGKy8bC7QD1e3otLuQEheFGdkm0eUQESkOQ4WC3DYuDXNHJ8Ph8uDpD86ILkd1PjjpXfpYPCGdR0mJiIaAoUJBJEnC6nu8sxWbD9ahpq1HcEXqIcsyPjjh7chaxKUPIqIhYahQmFm5ybh1bCrcHhl/2Mm9FYFibu3GubYeRGk1uIX9KYiIhoShQoG+cbu3b0XJwTo0WfsEV6MOvlmKefnJiDfwVlIioqFgqFCguXnJ3r0Vbg827jSLLkcVPug/SnrHBC59EBENFUOFQn399jEAgBf3n0dbl11wNcrW0e3AwfMdAIA72J+CiGjIGCo
U6taxqZiWbUSf04M/fsIum8Ox7cQFuD0yJmYkIic5VnQ5RESKxVChUJIk4euLvbMVz+2qRoOFfSuGynet/D1TRgquhIhI2RgqFGzJpBGYMzoJfU4PfvbOSdHlKFJnnxOf9N9KylBBRDQ8DBUKJkkSfvTAZEgS8HplAw6caxddkuJsP9kMh9uD/LQ4jEmPF10OEZGiMVQo3JQsIx6f472d9cevH4PbIwuuSFkuXfqQJHbRJCIaDoYKFfjeneOREK3DsQYbthysFV2OYvQ63Pio/wKxe6ZkCK6GiEj5GCpUICXegG8Xea9G/+W20+hxuARXpAw7Tjej1+lGdlIMJmcmii6HiEjxGCpU4sn5uchJjkFzpx3P7TonuhxFeKd/6ePuyVz6ICIKBIYKlYjSafC9O72Xjf3hoyp0dDsEVxTe+pxulB73tua+ZypPfRARBQJDhYo8MC0TkzIS0Wl34ZkPz4ouJ6xtP9mMbocbWaYYzMxJEl0OEZEqMFSoiEYjYfU9EwAAf9tzHnUdvBr9Wl47VA8AeGB6JjQaLn0QEQUCQ4XKLBybipvyU+Bwe/CrbWdElxOWrL1OfNh/6uPBGZmCqyEiUg+GCpWRJAk/6J+t+EdFHU422QRXFH7eO9YEh8uDsenxmDAyQXQ5RESqwVChQtNzTLh36kjIMvDzd0+JLifsvFHZAMA7S8FTH0REgcNQoVLfu3M8tBoJH5xsxv5qtu/2ae7sw66z3rs+HpjOpQ8iokBiqFCp/LR4PNbfvvtn75yALLN9NwC8fbgRHhmYkWNCbkqc6HKIiFSFoULFvnXHWETrNSivseD9/p4Mke7VCu+pj89wloKIKOAYKlRsRGI0vnRLHgBg3Tsn4XR7BFck1ukLnaiss0KnkXjqg4goCBgqVO4riwqQHBcFc2s3Xj4Q2ZeN+S5bu31COlLiDYKrISJSH4YKlUuI1uNbd4wFAPxf6Wl02SPzsjGn24NXK7ynPpbNzhFcDRGROjFURIDPzRuFvNQ4tHY5sHFHlehyhNhxqgWtXXakxkfhtvFposshIlIlhooIoNdqsOou72Vjz35cjUZrr+CKQm9LmXfp46EZWdBr+WVPRBQMfLpGiLunjMTs3CT0Ot3477dOiC4npNq67PjgRDMALn0QEQUTQ0WEkCQJP3lwMjQS8ObhRuyuahVdUsi8WlEPl0fGtGwjxrMtNxFR0DBURJDJmUY8MS8XAPCj145FxBFTj0fG83vPAwCWc5aCiCioGCoizPfuHI/kuCicae7CX3efE11O0O043YJzbT1IiNbh4ZlZosshIlI1hooIY4zVY/Xd3k2bv9p2Gg0WdW/a/Et/cFo+OwdxBp3YYoiIVI6hIgItm5WDWblJ6Ha48e9bj6r2XhBzSxd2nG6BJAFP3ZQruhwiItVjqIhAGo2Enz0yFXqt9xbTt440ii4pKP62x7uX4vbx6bw8jIgoBBgqItTYEQn46m1jAAA/fv0YLD0OwRUFVmefEyVldQCAf1owWmwxREQRgqEign11cQHGpMejtcuB/3lbXb0rNh2oRZfdhYK0ONwyJlV0OUREEYGhIoIZdFqse3QqJAnYfLAOu86qo3eF3eXGsx+bAQBfvjUfkiQJroiIKDIwVES4WbnJWNHfu+KHrx5Br8MtuKLhe6WsHhdsdoxMjMbDhTxGSkQUKgwVhFV3j8fIxGicb+vBrz84LbqcYXG5PfhD/6VpxQvzYdBpBVdERBQ5GCoICdF6/OdDUwAAf/y4GkfrrYIrGrq3jjSipr0HyXFReHwuO2gSEYUSQwUBAJZMGoH7pmbA7ZHx3c2V6HMqbxnE45HxzIdnAQBfXDAasVFsdkVEFEoMFeT3kwcnIzXegFMXOvFfbx0XXc6gvXWkEacvdCHBoMOTN40WXQ4RUcRhqCC/1HgDfrl8OgDg+b01ePdok+CKBs7ucmP9eycBAF9emA9jjF5wRUREkYehgi6zcFwaVi7MBwCsfuWwYu4GeWFvDWrbe5GWYMA/35onuhwioojEUEFX+O6d4zE92whrrxPffvkQXGF+Rbq114nfbD8DAPjOknHcS0FEJAhDBV0hSqfB05+diXiDDvvPteO3/Zsfw9UfdlSho8eJMenxWDYrW3Q5REQRi6GCrio3JQ7//bD3mOnTH5zBPnOb4IqurqqlC3/6pBoA8IO7J0Cn5Zc0EZEofALTNT04IwuPFmbDIwPf3nQo7C4dc3tkfH9LJRwuDxaOS8MdE9NFl0REFNEYKui6fvrgZOSlxqHR2ofvbK6ExyOLLsnvuV3VKK+xIN6gw88emco7PoiIBGOooOuKM+jwzOcKYdBpsP1kM37f3wJbtOrWbvz8vVMAgH+7byIyTTGCKyIiIoYKuqFJmYn4zwe9+yt+8f4p7K4Se5up0+3Bdzcfgt3lwS1jUvH4HLbjJiIKBwwVNCDL5+Rg6Szv/opvvlSB2vYeYbWsffskymssSIjW4WePctmDiChcMFTQgP3ng1MwYWQCWrsceOKP+9Bs6wt5DW8dbsSfd3lPe/xi2XRkJ8WGvAYiIrq6QXcJWr9+PUwmEwDAYrFg1apVga6JwlRMlBZ//eJcLP3DbtS092DFn/ZhU/FNSIqLCsn7V7V0YVVJJQDgK4sKcOfkkSF5XyIiGphBzVSsX78eAFBcXIzi4mIUFhZi5cqVQSmMwtOIxGi88KX5GJFowOkLXVjxp30haeVd296Dp/60H90ON+bnJ+N7d44L+nsSEdHgSLIsD/iMYFJSEqqrq/0zFQAgSRIG+hI2mw1GoxFWqxWJiYmDLpbCx5kLnXhs4160dzuQEheF33xuJm4uSA3Ke9W09eCzz+5FvaUXealx2LRyPtITooPyXkREdKWBfv8e8EyF2WyGxWK5LFD4lJaWDqlIUq6xIxKw9asLMCkjEW3dDjz5p/145sOzAb8npKqlC49t3IN6Sy/y0+LwcjEDBRFRuBpUqLgak8kEi8Vy1T+z2+2w2WyXfZB6jEqJxSv/cjMeKcyC2yPj5++dwkO/24XjDYH5e373aCMe/O0uNFr7UJAWh5e/PB8jEhkoiIjC1bBPfyQnJ6O9vf2qf7Z27VoYjUb/R04O+wmoTUyUFr9YNh3/u2w6jDF6HK234TO//QT/9ebxIbf17nO6sfbtE/jK8+XosrswNy8Zm1behHQGCiKisDbsUHGtQAEAa9asgdVq9X/U1tYO9+0oDEmShKWzsrHtOwtx79SRcHlk/PGTaiz6+Uf448fmAYcLu8uNv+05h0U//xAbdnpnxr58ax5e+Od5SI03BHMIREQUAAPeqGk2m1FQUHDFpkxJkrBt2zYUFRXd8DW4UTMyfHSqGWvfPolTFzoBABoJmJ5jwvz8FCRG6xGl08Dg+9BrUd/Ri4qaDhw834H2bm8AyTBG4z/un4R7pmaIHAoREWHg378HffqjrKwM+fn5F1+Apz/oKtweGSVltfjzJ+f84WIgRiZG42uLC7B8Tg4MOm0QKyQiooEa6PfvQTW/WrNmDUpLS1FcXAwAKCkp8f9voktpNRIemzMKj80ZhUZrL3aebs
GReivsTg/sLg/sLjfsLg/6nG4kxUahcFQSZo4yYVq2CVE6NnolIlKiQc1UAN4GWL6ZigMHDmDdunUD/m85U0FERKQ8QVn+CFVRREREFD4C3vyKiIiI6HoYKoiIiCggGCqIiIgoIBgqiIiIKCAYKoiIiCggGCqIiIgoIBgqiIiIKCAYKoiIiCggGCqIiIgoIBgqiIiIKCAYKoiIiCggGCqIiIgoIBgqiIiIKCB0oXwz34WoNpstlG9LREREw+D7vn2ji81DGio6OzsBADk5OaF8WyIiIgqAzs5OGI3Ga/65JN8odgSQx+NBQ0MDEhISIElSwF7XZrMhJycHtbW1173nXck4RnVQ+xjVPj6AY1QDtY8PCPwYZVlGZ2cnMjMzodFce+dESGcqNBoNsrOzg/b6iYmJqv0C8eEY1UHtY1T7+ACOUQ3UPj4gsGO83gyFDzdqEhERUUAwVBAREVFAqCJUGAwG/OhHP4LBYBBdStBwjOqg9jGqfXwAx6gGah8fIG6MId2oSUREROqlipkKIiIiEo+hgoiIiAKCoYKIiIgCIqR9KoJh/fr1MJlMAACLxYJVq1aJLSgA1q9fDwCoqqoCAGzYsOGKP1fTmJcsWYJt27Zd9ntqGOPq1atRUFAAAEhOTsbSpUv9f6aG8W3cuBEWiwUmkwlVVVVYs2aNf0yA8sZosViwefNmbNmy5YqvR+DG41HCeAcyRkC5z54bje9SSn3uDGSMQp89soKtW7dOXrdunf/X27Ztk4uLiwVWNHyrVq267NfFxcVyUVGR/9dqG/OWLVvkT38ZKn2MHR0dcmFhodzR0SHLsiyXlZVdNkalj0+WvWPwjU+WvWNeunTpZX+upDGWlZXJGzZskNetWycXFhZe8ec3Go8SxnujMSr92XOj8V1Kqc+dG40xHJ49ig4VJpPpsgebLMtXfKEoSUdHh1xUVHTZmHxfFFVVVbIsq2vMHR0d8oYNG66oX+ljLC4uvuwfrix7//H6KH18sixf9s3mar+n1DFu2bLlqg/rG41HSeO92hjV9Oy51t+hjxqeO9caYzg8exS7p8JsNvunXj+ttLQ09AUFyMGDB2E2m/2/zs/PB+CdplLbmDdv3ozly5df9ntqGOPGjRuxdOlSmM1mf81FRUUA1DE+ADCZTFiyZAksFgsA77h8X6tqGaPPjcajlvFGyrNHrc8dIDyePYoOFVdjMpn8DzqlMZlM6OjoQGFhof/3fH/Z+fn5qhpzaWmp/4v9Ukofo6/+8vJyWCwW5OfnY+XKlf6/R6WPz+fZZ5+F2WxGUlISVq9ejdLSUv/6u1rG6HOj8ahhvJHy7FHrcwcIn2eP4jdqflpycjLa29tFlxEwa9euxYYNG66aLn2UOGbfF/1Av5iVMkbfP1yTyeR/QK9btw55eXno6Oi45n+nlPH5mEwmrF69Gtu2bcP69etRVFSE5cuXq+7r9Hp847nWmJU+XjU+e9T63AHC59mj2JmKa1HKF8BArF69Go899hiKi4uv+3lKG7Nvim4wlDbG2bNn+/+37yeB600xKm18q1evRn5+PrZs2YKqqiq0t7dj1qxZ1/1vlDbGG7nReJQ8XjU+eyLhuQOIf/YoNlT41vs+zZdEla6kpAQFBQWXHfdRw5jLy8sv+6L/NKWP8Vo1mkymy/YdfJpSxgdcXJv1TSPn5+ejrKwMJpMJJSUlqhjjpW40HrWNV43PHrU/d4DwefYodvkjPz//mv9nXW3NTEl8qdL3U4LFYkF7e7sqxtze3o7y8nL/GH3n4devX4/8/HwsXbpU0WP0fZMxm82XrU9bLBbMnj1bFX+HZrP5qlPiK1euBKC+f5sDGY9axqvWZ4/anztA+Dx7FDtTAQBr1qy5bFqnpKTkhtN14a68vBzl5eUoLCyE2WyG2WzGxo0bkZycDED5Yy4qKsKqVav8H75vRKtWrfJPTSp9jOvWrcOmTZv8vy4pKUFRUZH/H7rSx1dUVOTfDHapsrIyxf8dXmsq+EbjUdJ4rzVGtTx7rjY+tT13rvV3GA7PHsXfUupLmgBw4MABrFu3TnBFQ2exWJCXl3fVTUSX/jWpZcwlJSXYtGkTSkpKsGrVKixZssSfmJU+Rl+3SQBoa2u7on6lj89isWDt2rVISUnxr9sWFxdf0VFTKWM0m83+r8fy8nKsWrUKc+bMuaIT4fXGE+7jvd4Y1fDsGcjfIaDs585Axij62aP4UEFEREThQdHLH0RERBQ+GCqIiIgoIBgqiIiIKCAYKoiIiCggGCqIiIgoIBgqiIiIKCAYKoiIiCggGCqIiIgoIBgqiIiIKCAYKoiIiCggGCqIiIgoIBgqiIiIKCD+Px2n/2LLxRaNAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.plot(loaded_nn(torch.from_numpy(t_train.reshape((-1, 1))).float()).detach().numpy())\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aca960c2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "In the solver form declaration, default_domain mode flag is True\n", + "target_form shape is torch.Size([160, 1])\n", + "processing a single variable\n", + "obtained bconds: [, ]\n", + "processing a single variable\n", + "Before grid creation [tensor([0.0000, 0.0500, 0.1000, 0.1500, 0.2000, 0.2500, 0.3000, 0.3500, 0.4000,\n", + " 0.4500, 0.5000, 0.5500, 0.6000, 0.6500, 0.7000, 0.7500, 0.8000, 0.8500,\n", + " 0.9000, 0.9500, 1.0000, 1.0500, 1.1000, 1.1500, 1.2000, 1.2500, 1.3000,\n", + " 1.3500, 1.4000, 1.4500, 1.5000, 1.5500, 1.6000, 1.6500, 1.7000, 1.7500,\n", + " 1.8000, 1.8500, 1.9000, 1.9500, 2.0000, 2.0500, 2.1000, 2.1500, 2.2000,\n", + " 2.2500, 2.3000, 2.3500, 2.4000, 2.4500, 2.5000, 2.5500, 2.6000, 2.6500,\n", + " 2.7000, 2.7500, 2.8000, 2.8500, 2.9000, 2.9500, 3.0000, 3.0500, 3.1000,\n", + " 3.1500, 3.2000, 3.2500, 3.3000, 3.3500, 3.4000, 3.4500, 3.5000, 3.5500,\n", + " 3.6000, 3.6500, 3.7000, 3.7500, 3.8000, 3.8500, 3.9000, 3.9500, 4.0000,\n", + " 4.0500, 4.1000, 4.1500, 4.2000, 4.2500, 4.3000, 4.3500, 4.4000, 4.4500,\n", + " 4.5000, 4.5500, 4.6000, 4.6500, 4.7000, 4.7500, 4.8000, 4.8500, 4.9000,\n", + " 4.9500, 5.0000, 5.0500, 5.1000, 5.1500, 5.2000, 5.2500, 5.3000, 5.3500,\n", + " 5.4000, 5.4500, 5.5000, 5.5500, 5.6000, 5.6500, 5.7000, 5.7500, 5.8000,\n", + " 5.8500, 5.9000, 5.9500, 6.0000, 6.0500, 6.1000, 6.1500, 6.2000, 6.2500,\n", + " 6.3000, 6.3500, 6.4000, 6.4500, 6.5000, 6.5500, 6.6000, 6.6500, 6.7000,\n", + " 6.7500, 6.8000, 6.8500, 6.9000, 6.9500, 7.0000, 7.0500, 7.1000, 7.1500,\n", + " 7.2000, 7.2500, 7.3000, 7.3500, 7.4000, 7.4500, 7.5000, 7.5500, 7.6000,\n", + " 7.6500, 7.7000, 7.7500, 7.8000, 7.8500, 7.9000, 7.9500],\n", + " dtype=torch.float64)]\n", + "grid.shape inside Domain is torch.Size([160, 1])\n", + "[2024-05-20 18:12:14.076439] initial (min) loss is 282.02587890625\n", + "Step = 10000 loss = 0.109007.\n", + "[2024-05-20 18:13:47.713147] No improvement in 1000 steps\n", + "[2024-05-20 18:13:47.713433] Step = 10764 loss = 0.341743 normalized loss line= -0.001721x+1.181644. There was 1 stop dings already.\n", + "[2024-05-20 18:13:56.362817] No improvement in 1000 steps\n", + "[2024-05-20 18:13:56.363080] Step = 11764 loss = 0.057747 normalized loss line= -0.001363x+1.548429. There was 2 stop dings already.\n", + "[2024-05-20 18:14:05.009672] No improvement in 1000 steps\n", + "[2024-05-20 18:14:05.009765] Step = 12764 loss = 0.044953 normalized loss line= -0.000520x+1.138855. There was 3 stop dings already.\n", + "[2024-05-20 18:14:13.676847] No improvement in 1000 steps\n", + "[2024-05-20 18:14:13.676945] Step = 13764 loss = 0.052647 normalized loss line= 0.000412x+0.745773. There was 4 stop dings already.\n", + "[2024-05-20 18:14:22.299137] No improvement in 1000 steps\n", + "[2024-05-20 18:14:22.299406] Step = 14764 loss = 0.088807 normalized loss line= -0.000600x+0.507226. There was 5 stop dings already.\n", + "[2024-05-20 18:14:30.945768] No improvement in 1000 steps\n", + "[2024-05-20 18:14:30.945853] Step = 15764 loss = 0.036569 normalized loss line= 0.002070x+0.982140. 
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "0eeaff43",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "(160, 1)"
+      ]
+     },
+     "execution_count": 17,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "solution_model.shape"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d30c423f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "for eq_idx, eq in enumerate(epde_search_obj.get_equations_by_complexity(4.5)[0].vals):\n",
+    "    referential_data = global_var.tensor_cache.get((eq.main_var_to_explain, (1.0,)))\n",
+    "\n",
+    "    discr = (solution_model[..., eq_idx] - referential_data.reshape(solution_model[..., eq_idx].shape))\n",
+    "    discr = np.multiply(discr, g_fun_vals.reshape(discr.shape))\n",
+    "    rl_error = np.linalg.norm(discr, ord = 2)\n",
+    "\n",
+    "    fitness_value = rl_error\n",
+    "\n",
+    "    eq.fitness_calculated = True\n",
+    "    eq.fitness_value = fitness_value"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d3a01291",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ref_data = referential_data.reshape(solution_model[..., eq_idx].shape)"
+   ]
+  },
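+  {
+   "cell_type": "markdown",
+   "id": "added-note-2",
+   "metadata": {},
+   "source": [
+    "*Added sketch (not part of the original notebook):* the loop above scores each equation with an L2 norm of the mismatch between the solver output and the cached field, weighted pointwise by the cached `g_func` values. A small standalone helper makes that metric explicit; the example call reuses `solution_model`, `ref_data` and `g_fun_vals` defined above."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "added-sketch-2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Added sketch of the discrepancy metric used in the fitness loop above:\n",
+    "# an L2 norm of (solution - reference), weighted pointwise by g_func.\n",
+    "def weighted_l2_error(solution, reference, weights):\n",
+    "    discr = solution - reference.reshape(solution.shape)\n",
+    "    discr = np.multiply(discr, weights.reshape(discr.shape))\n",
+    "    return np.linalg.norm(discr, ord=2)\n",
+    "\n",
+    "# Example call (names taken from the cells above):\n",
+    "# weighted_l2_error(solution_model[..., 0], ref_data, g_fun_vals)"
+   ]
+  },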
"iVBORw0KGgoAAAANSUhEUgAAAhUAAAGeCAYAAAAje/P4AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABXH0lEQVR4nO3dd3RU1QLF4d9MOoQkhF6CEHwKIooUERFFARUBBaUoChbaQ5CnoiD2jsTeQBDs9IggCAqhKU1KKBYUJCBRikgq6cnc98dMoiglIZOcmcn+1sqSmruPhJnNPeeeY7Msy0JERESklOymA4iIiIhvUKkQERERt1CpEBEREbdQqRARERG3UKkQERERt1CpEBEREbdQqRARERG3UKkQERERt/Avz4s5HA4OHDhAlSpVsNls5XlpEREROUOWZZGenk7dunWx209+P6JcS8WBAweIiooqz0uKiIiImyQmJlK/fv2T/ny5looqVaoAzlBhYWHleWkRERE5Q2lpaURFRRW9j59MuZaKwimPsLAwlQoREREvc7qlC1qoKSIiIm6hUiEiIiJuoVIhIiIibqFSISIiIm6hUiEiIiJuoVIhIiIibqFSISIiIm6hUiEiIiJuoVIhIiIibqFSISIiIm6hUiEiIiJuoVIhIiIiblGuB4qJiIiUVH5+PuvXryctLY28vDyCgoK48sorCQ4ONh1N/kGlQkREPFJ+fj7Tp0/nueeeY/fu3cf9XI0aNfjvf//L8OHDqVOnjqGE8k82y7Ks8rpYWloa4eHhpKam6uhzERE5qR9//JGePXsWlYmqVasSHR1NQEAAiYmJ/P777wAEBwfzwQcf0K9fP5NxfV5x37+1pkJERDzKd999R8eOHdm9ezc1atRgwoQJ7N+/n82bN7N+/Xr27dvHnDlzaNu2LdnZ2dx888288MILlOO/keUkdKdCREQ8xvbt2+nUqRNHjx6lZcuWLF26lGrVqp3w1xYUFPDAAw/w2muvATB06FAmTZqE3a5/L7ub7lSIiIhXSUxMLCoUbdq0IS4uzlko/vgDPv8cNm+GjIyiX+/n58err77KG2+8gd1uZ8qUKbzwwgsGRyAqFSIiYpxlWQwePLjoDsWyL76g6gcfQPv2ULs23HADtGkDVapAkybw8cfgutF+zz338M477wDw6KOP8tVXXxkcScWmUiEiIsZNmTKFpUuXEhwczOxXXyW8Z0+4/35Yt85ZHpo2hVq1nN/++WcYOBA6dwbXQs4hQ4YwZMgQLMvilltuYe/evWYHVEGpVIiIiFEJCQmMHj0agA+GDuXsfv2cZSI8HN54A/bvhx9/hEOHnFMh48dDcDCsWAEXXACuOxNvvvkmF198McnJyfTu3Zv8/HyTw6qQVCpERMSYwmmPjIwMhlx0EX0nT3aWh2bNnGso7rkHoqL++g01asBDD8EPP8BVV0F2NvTqBV9/TVBQELGxsVStWpX4+HjeeustcwOroFQqRETEmC+//JKVK1dSLziYt//4A1tODlx7LWzYAGefffLfGB0NS5ZAt26QlQXdu8PGjURFRTFhwgQAHnvssaL9LKR8qFSIiIgRlmXxxBNPYAPiatcm4PffnUVi1iwIDT39JwgMhLlz4corIT0dunaFxEQGDRrEJZdcwrFjx7j//vvLfBzyF5UKEREx4osvvmDTpk08HRBAk337ICQEPv3UuZaiuEJCnI+btmoFSUlw553YoWi/ijlz5rB06dKyGoL8g0qFiIiUO8uyePzxx2kCjCtcUPnOO86FlyUVGgozZkClSrB8Obz5Ji1atGDUqFEA3HvvvRQUFLgvvJyUSoWIiJS7BQsWsHXrVl7x88PPsuD6652PiZ6pc86Bl15yfnvsWPjxR5588kkiIyPZuXMns2bNck9wOSWVChERKVeWZfHss89yJdC1oAD8/SEmpvSf+L//dS7yzMmBgQMJDw3lgQceAOCpp57SI6blQKVCRETK1ebNm9m6ZQuv2GzOHxg+HM49t/Sf2GaD995zrsnYsgU++oiRI0dSvXp1du/ezfTp00t/DTkllQoRESlXkyZNYiDQwrKcBeDxx933yevUgUcfdX77kUeoYrczZswYAJ5++mny8vLcdy35F5UKEREpN8nJycydOZOiGvHoo1C9unsvcs890KgRHDwIL73EiBEjqFWrFgkJCXz44YfuvZYcp1SlokuXLu7KISIiFcBHH33ENdnZNAKs6tVhxAj3XyQoCApPK42JoVJKCmPHjgXgxRdfxOFwuP+aApSiVMTGxhIXF+fOLCIi4sMsy+Kdd97hPtf3bcOHO/eZKAt9+kC7dpCZCY8/zpAhQwgPD2fXrl0sWbKkbK4pZ1YqUlJSSEpKcncWERHxYV9//TVVfvqJ9oAVEOBcoFlWbLa/HjH98ENCjx5lyJAhALz66qtld90K7oxKxZw5c+jbt6+7s4iIiA979913udf1bdsttzgXVZalSy+FTp0gPx9efJF77rkHPz8/li9fzvbt28v22hVUiUtFXFwcnTt3LtavzcnJIS0t7bgPERGpeDIzM9k4bx59Cn/gvvtO9cvd5+GHnf+dOpUGgYH07t0bgNdee618rl/BlLhUpKSkEB0dXaxfO378eMLDw4s+ov5+fK2IiFQYixcv5o6sLAIAq2NHaNGifC585ZVwySXODbFee437XGVmxowZHDp0qHwyVCAlKhVTpkwpannFMW7cOFJTU4s+EhMTSxxQRES835yZM7nd9W3byJHld2Gb7a+7FRMn0vacc2jXrh25ublMnjy5/HJUEMUuFfHx8bRu3bpEnzwoKIiwsLDjPkREpGJJS0sjY+FC6gH54eHQvXv5BujWDZo3dx6P/vbb3HPPPQBMmzZNB425mX9xf2FSUhLx8fFFj5Hu2bMHgJiYGKKjo0t0B0NERCqOzz//nJtdO1n69e/v3EeiPNntzkPGbrsNJk6k188/U61aNRITE/nqq6+47rrryjePD7NZlmWdyW+Mj4+nVatWlOS3p6WlER4eTmpqqu5aiIhUEH2uvZYPvvqKygAbNkDbtuUfIjcXGjSAw4dh9mxGf/str7zyCjfccAPz588v/zxeprjv32f0SGlsbCzjx48HYOzYsdoES0RETigpKYmwuDgqAzkNG8LFF5sJEhgIw4Y5v/3mm0V7VixatIjff//dTCYfdEalonfv3sydOxfLspgwYUKxHzEVEZGK5bPPPqO/a91C0KBBzoWTpgwb5jxmfc0ammRn06FDBwoKCnj//ffNZfIxOlBMRETKzNpZs7iy8Du33WYyCtStCzfd5Pz2W28xdOhQAKZOnaoFm26iUiEiImUiKyuLOqtWYQeOtWoFDRuajgSFj7NOn85NV15J1apV+fXXX1m2bJnZXD5CpUJERMrEihUr6JGfD0DlQYMMp3Fp3x4uvBCyswmZNYtbb70VcJ6eKqWnUiEiImXim1mzuARwALaePQ2ncbHZ/jrIbNo0Bg4YADjXfugoidJTqRAREbezLAu/hQsBSG3atOwPDyuJm292Hrm+cyet8/Np2rQp2dnZzJ0713Qyr6
dSISIibrdt2zauTE0FINR1N8BjhIdDH+fRZrb33mPgwIGApkDcQaVCRETcbtns2XR0fTugb1+TUU6scI3H7NkM6NULm83G119/zd69e83m8nIqFSIi4naZc+bgDyTVqweNG5uO828dOsB//gPHjlFv7Vo6deoEwMcff2w4mHdTqRAREbc6ePAgF7r+xR/Yr5/hNCdhs/11t2LatOOmQM7w9ApBpUJERNwsbsECrnV92+PWU/zd7beDnx+sW8eNTZtSuXJl9uzZw4YNG0wn81oqFSIi4lZHZswgBEgOD3fuCeGpatcG1wmllefN44YbbgBg1qxZJlN5NZUKERFxG4fDQfVNmwDI6NTJ7FkfxVF4J2X6dG5xTdXMmTNH23afIZUKERFxm+927OCK7GwAat1+u+E0xdCjh/MR0/37uaZSJapWrcqhQ4dYtWqV6WReSaVCRETcZsv06ZwF5NrtBHjDCdbBwUV7VgTMmkXv3r0BmDlzpslUXkulQkRE3CbftYvmgf/8BypVMpymmApPT507l1tvvBGATz/9lJycHIOhvJNKhYiIuEV2djbRu3cDEORa9OgVOnSABg0gLY3LUlOpU6cOKSkpLF261HQyr6NSISIibrFh+XI6OBwA1L7jDrNhSsJuB9dppX7Tp9PPtWBTUyAlp1IhIiJuse+DDwgCjlSujK1JE9NxSqbwKZAlSxjQtSsACxYsIDMz02Ao76NSISIibhHy9dcAJF18sec/SvpPTZvCRRdBfj4X7d3LWWedRWZmJl9++aXpZF5FpUJEREotKSmJVn/8AUANT95F81Rc0x62uXOLngKJjY01mcjrqFSIiEipbZ49m7OBPCDS9YbsdQpPU125kluuugqARYsWke3ad0NOT6VCRERKLenTTwHYW6cOVKliOM0ZatQI2rQBh4OL9u6lfv36pKens2zZMtPJvIZKhYiIlFrVLVsAyLn0UsNJSsk1BWKfO5ebbroJ0BRISahUiIhIqSQnJXFRSgoAdQo3kvJWrt01+fpr+nfsCDifAsnNzTWXyYuoVIiISKlsnzGDmkCWzUZ116mfXqtBA2jXDiyL1r/+Sp06dUhNTWX58uWmk3kFlQoRESmVlHnzAEioWxcCAw2ncYPCKZA5c7jRtW23pkCKR6VCRERKJWLrVgByL7vMcBI36d3buc/GunXcevnlAMyfP5+8vDzDwTyfSoWIiJyxtORkWrjWU9T19vUUherVA1dBart/PzVq1CApKUnHoReDSoWIiJyx7z76iAggzW6n1rXXmo7jPn97CkRTIMWnUiEiImcsdf58APbUqwf+/mbDuNNNNzkPGtu4kdtcdy0+++wz8vPzDQfzbCoVIiJyxiIL11O41h74jNq14YorAGiXmEi1atU4cuQI33zzjeFgnk2lQkREzkhGSgrNU1MBqO8r6yn+zjUF4hcbS8+ePQFNgZyOSoWIiJyRn6ZPpzKQZLdT9+qrTcdxvxtvBD8/iI9noGun0Hnz5lFQUGA4mOdSqRARkTOSvHAhAL/UqYPN7oNvJzVqgOtgsUt//52IiAgOHTrEunXrDAfzXD74VSAiIuWhSnw8ANlt2hhOUoZcUyD+sbHccMMNAMydO9dkIo+mUiEiIiXmyM/n3CNHAKjpOnjLJ/Xs6ZwC2bGD211TIJ999hmWZZnN5aFUKkREpMT2LFhABJAOnN27t+E0ZahataIpkMsOHyY0NJTffvuNzZs3Gw7mmVQqRESkxA65pgB+jozEPzjYcJoy5ipNAZ9/znWuA9Pmuc47keOpVIiISIkFrl8PQOqFFxpOUg569nRuhLV5c9FGWPPmzdMUyAmoVIiISMlYFo1+/x2AKt26GQ5TDmrWBNfmXl3S0ggMDGTXrl3s3LnTcDDPo1IhIiIlcnjtWmoWFJADNBkwwHSc8uGaAgn+4gu6dOkCaArkRFQqRESkRPZPnw7A95UrE1azpuE05aRXL+d/16/nto4dAZWKE1GpEBGRErFWrwbgjyZNDCcpR3XrQvv2AHTPzcVut7N161b27dtnNpeHUakQEZESqZuQAEBw586Gk5Qz1xRI6FdfcblrjcVnn31mMpHHUakQEZFiy963j/o5OTiARrfcYjpO+brxRud/v/mG21yFSlMgx1OpEBGRYvt1xgwAdvr5cdYFFxhOU84aNICLLwbLopfNBsDatWs5fPiw4WCeQ6VCRESKLWPZMgB+rV8fm+uNtUJxTYFErlhBmzZtsCyLBQsWGA7lOVQqRESk2Kp89x0Aua1bG05iSOE5J6tWcavruHdNgfxFpUJERIonK4uzjh4FoNr11xsOY0h0NFx0ERQU0C8kBIDly5eTkpJiNpeHUKkQEZFiSYmLIxA4CJzfvbvpOOa4pkBqr1nDeeedR35+Pl988YXhUJ5BpUJERIrlkOs2//bQUKpGRhpOY1DhFEhcHP27dgU0BVJIpUJERIrFtm4dAEnnnms4iWHnngvnnw/5+dwWFgbAkiVLyMzMNBzMPJUKERE5Pcui9t69AAS4tqmu0FxTIA02b+ass84iKyuLpUuXGg5lnkqFiIiclvXzz4Tn5ZEFNCrcBKoic02B2L76iltcJ7VqCkSlQkREiuEP13bUm202Lqioj5P+XbNmzmmQ3FzuqF4dgIULF5Kbm2s4mFkqFSIiclrprlv7e2vXJjAw0HAaD2CzFU2BnPPdd9SqVYuUlBRWrVplNpdhKhUiInJalXfsACCnZUvDSTxI4RTIkiX0ve46QAeMqVSIiMippaVRKykJgEjXm6cALVo4N8PKzuauOnUAZ6koKCgwm8sglQoRETmlvHXrsAN7gQtdW1MLx02BNN+9m/DwcA4fPsyGDRsMBzNHpUJERE7psOvArK2BgTRu3NhwGg/jmgLxW7yYXtdeC1TsKRCVChEROaW8NWsAOBIdXTFPJj2VNm0gKgoyMhjasCHgfLTUsiyzuQxRqRARkZOzLKrt3g2A36WXGg7jgf42BdLm118JCQlh7969bN++3XAwM1QqRETk5PbtIywnh1ygfo8eptN4JtcUiP/ixXTr3BmouBthqVSIiMhJHVu+HIBtQOvLLjOaxWO1awd16kBaGv895xyg4q6rUKkQEZGTOuo60ntneDjVXTtHyj/Y7UV3Ky47eBB/f3++//57du3aZThY+StRqUhJSWHKlClMmTKFsWPH0qdPH1JSUsoomoiImOa3eTMAx847z3ASD+cqFUFLltD5iiuAinm3okSlYuzYsXTu3JmhQ4cyYcIEIiMj6dOnT1llExERk3JyqHXgAACVO3UyHMbDdegANWpAcjIjmjUDKua6ihKVioSEBGJjY4u+37hxYza7WqyIiPgWa+tWAhwOjgDndu1qOo5n8/MD1+mtVyYlYbPZ2LhxI7/99pvhYOWrRKVi2bJljBkzpuj7mzZtorNrpauIiPiWpCVLANhos3GRzvw4PdcUSOWvvuKydu0AmD9/vsFA5e+MF2rGxsaSkpLCu+++e9Jfk5OTQ1pa2nEfIiLiHY7FxQGwv04dgoODDafxAh07QmQkHDnCiAsuACreF
EiJS0XhYs2EhAT69OlDRETESX/t+PHjCQ8PL/qIiooqTVYRESlHoT/8AEB+q1aGk3iJgADo2ROAazMyAPj666/5888/DYYqXyUuFREREQwdOrRoGqRq1aonfQJk3LhxpKamFn0kJiaWKqyIiJSTP/6gWmoqDqC6TiYtPtcUSHhcHBddeCEFBQUsXLjQcKjyU+xSkZKSwtixY48rEJ07dyYlJYU41y2yfwoKCiIsLOy4DxER8Xz5a9cC8BNwUceORrN4lU6dIDwcDh7kntatgYo1BVLsUpGQkEBMTAxJSUlFP1ZYME41BSIiIt7nT9emV/EBAZzj2iVSiiEoCK6/HoAeOTkALF26lPT0dJOpyk2xS0XLli0ZM2YM0dHRRT82e/ZsWrZsqSdARER8TL7rZNKjZ5+N3a7Nl0vENQVSbfVq/nP22eTm5rLE9SSNr/MvyS8eN24cMTExRd9PSUlhuWtfeBER8REFBVRLSADAr317w2G80NVXQ2gotsRE7hkwgFG//MK8efPo27ev6WRlrkSlIiIi4rh9KkRExAf99BMheXlkAA27dTOdxvuEhED37jBrFjc5HIwCvvjiC7Kzs33+0Vzd0xIRkeNkrVoFwCagjWsTJykh1xRInXXrqFe3LseOHasQd/ZVKkRE5DiFO2n+FBZGrVq1DKfxUl27QqVK2Pbu5V7XFNKnn35qOFTZU6kQEZHj+G/ZAsCx8883nMSLVa4MN9wAQD+HA4AFCxaQl5dnMlWZU6kQEZG/HDtG9UOHAAjVyaSlc8stANRft47aNWqQlJTk81MgKhUiIlLE2rQJP2A/cP7VV5uO492uuQaqVsV28CBjL70UcG7F4MtUKkREpEja0qUAfGuz0VInk5ZOYGDRgs1++fkAfPbZZ+S4NsXyRSoVIiJSJMN1ez6xTh0qVapkOI0PcE2B1F63jga1a5OamspSV3HzRSoVIiLiZFmE/vgjAAVt2hgO4yOuuALq1MGWnMwjrrNA5syZYzhU2VGpEBERp/37CcvIIA+oee21ptP4Bj8/6NcPgF6uaY8FCxaQnZ1tMlWZUakQEREAHOvXA7AdaN2hg9kwvsQ1BVJ97VrOqVeP9PR0nz0LRKVCxMelpaWxfPlyXnrpJaZOncr69etJTU01HUs8UNLixYDzZNImTZoYTuND2rSBxo2xZWbyWIsWgO8+BVKisz9ExHts2LCB0aNHs379eizL+tfP9+rVi2effZbzzjvPQDrxRAXr1gHOk0n9/PwMp/EhNhvcfDM89xzd0tIAWLhwIRkZGVSuXNlwOPfSnQoRH5OcnMx///tfrmrXjgvWreNdy2JNcDC/hYayqVo1ngsLoxXOR9uaN2/OHXfcwZ9//mk6tpiWm0vkvn0ABGrqw/1cUyARGzbQokEDMjMzWey6M+RLVCpEfMi+ffto06IF1uTJ7AYmAYOA9tnZ1Dt2jNZHj/JwWhqbgR+qVeNch4MPP/yQdu3asXv3brPhxawdOwgoKCAJiL7mGtNpfE+zZtC8Oba8PB513R30xSkQlQoRH5GYmEi/yy9n7v79TAbqATRoAE88AR99BHFx8PrrzvMIQkI47+hRvvf3583wcPb/8guXXHIJa9asMTwKMSVn9WoAvgXaXnKJ2TC+qn9/AK4+ehRwHoeenp5uMpHbqVSI+IADBw4wsEMHpicmchFQULWqs0Ds2gVPPgkDBkCnTjBqFMyfDz/9BD16YM/PZ2RqKqvDwshKSqJTp06sWLHC8GjEhJSvvgLgxypVqFu3ruE0PurmmwEI3byZSxs1Ijs7m4ULFxoO5V4qFSJeLjc3l3s6d+aTX3/lbCA/Kgq/zZudBSIo6MS/qUEDWLAAYmOhUiUuSUtjY2Qkgbm53HTTTfz000/lOgYxLzA+HoDM5s0NJ/FhDRtCu3bYLItHzj4b8L2NsFQqRLzchLFjeWXnTuoBueecg/+GDRAdffrfaLM5zyVYuhTCwjg/KYn1oaHkp6TQrVs3jhw5UubZxUMcPUpV1y358C5dDIfxca4pkCt//x2AJUuW+NQj3ioVIl5szTffcMFrr3EWcKx2bQLXrYOS3rpu3x6WL4fISM4/doy5lSqRkJBAr169yM3NLZPc4mG+/RaAn4ALOnY0GsXn9esH/v6E/PgjPaKjyc3NZf78+aZTuY1KhYiXSktLY3nPntwA5NnthH7xBVSrdmafrHVrWLwYAgO5NjOTJ4OCWLt2Lc8884xbM4tnSo+LA2Aj0Np1PoWUkRo1oFs3AB6qUweAGTNmmEzkVioVIl5q0uDBjEtKAiB/wgQo7THVbdvCW28B8HhuLlcDzz//PBs2bChlUvF0WStXAvBrnTqEhoYaTlMB3H47ABfv3o0fEBcXx8GDB81mchOVChEvtGvnTrrMnUsg8MdllxEyerR7PvGQITB4MDbL4tPAQGo5HAwYMICMjAz3fH7xPA4HVVwLc62LLzYcpoLo1g2qVcP/jz8Y1bQpDoeDWbNmmU7lFioVIl4obsAAWgLH/P2pOW+ec9Glu7z1FrRqRWhuLu8FB/PLL7/w4IMPuu/zi2fZtYuQ7Gwygbra9Kp8BAYW7bA53LVN9yeffGIykduoVIh4mY3Ll3Pjli0AHLvvPuccrTsFBcEHH0BAANdmZ9MXmDRpkqZBfJTDdd7HFqDNpZeaDVORDBwIwNnff0+knx/x8fH8+OOPhkOVnkqFiBexLIuf77yT2sDhsDBqP/ts2Vzo/PPhkUcAeDc4mGrAyJEjKSgoKJvriTEpS5cCsMXfn2bNmhlOU4G0bg1Nm2LLzuap888HYPr06YZDlZ5KhYgXWfn++/RJTATA79VXnbdRy8q4cdC8OWHZ2Uz092fLli1Mmzat7K4nRljr1wPOk0n9/XVwdbmx2eCOOwC4JTMTcE6BOBwOg6FKT6VCxItkPvwwwcDuBg2ofuedZXuxwECYNg3sdvrm53MZMG7cOI66NkkSH5CRQYSrpAZdcYXhMBXQwIHg70+13btpW7ky+/fv55tvvjGdqlRUKkS8xNZFi7j68GEAIt54w72LM0+mTRsYPBiASSEhJCcl8fjjj5f9daV8bNmCn2XxO9Ckc2fTaSqe2rWhRw8AnmnYEIAPPvjAXB43UKkQ8RK/PfAAgcDPNWtS44Ybyu/CTz8NVapwflYWtwKTJ0/WMek+Is91Ku0G4GI9TmqGq7R3TEwkEJg7d65Xn1yqUiHiBRJ37KDjzz8DEPjYY+V78Vq1ihZtvhoURFBBge5W+IhU1yLNH0NDiYqKMpymgrrmGqhXj4C0NIbXqUNGRgZz5841neqMqVSIeIEdd99NFWBv5co0GjGi/AP873/QsCHVc3J4AJg1axbbtm0r/xziVkGuP8PMCy/EVh7TafJvfn5w110A/M+1Z8X7779vMlGpqFSIeLjUQ4do49pLIO3uu8tnLcU/BQfDhAkAPOTvTyTwiOvuhXipxESqpKaSD0RqPYVZd90FNhuNfvmFxjYba9asYdeuXaZTnRGVChEPt/mBB6hpWRwICOCCstqXojj69IGLLiIkP58xNhuLFy/2+pXq
FZrrUdLtQMsOHcxmqegaNgRXsXu+USPAexdsqlSIeLgan30GwN5rr8VWlvtSnI7NBk89BcD//PyoAVpb4cUyly8HnIs0dTKpBxg+HIAbjhwhEPjwww+9crM5lQoRD7Zz9mwuyMwkD2jqmn4wqnt3aN2a4Px8xtrtrFq1irVr15pOJWcge9UqAPbXq0d4eLjZMOJ8tLR+fYLS07kzNJQDBw6wZMkS06lKTKVCxIP98dxzAGypX5/Ipk0Np+G4uxX32GzUAp5zZRQvkp1N2C+/AGDTeR+ewd8fhg0DYGxYGOB8fNvbqFSIeKjspCRafPcdAP533204zd907Qpt2xJYUMBDNhtLlixh8+bNplNJSWzZgr/DwWHg7C5dTKeRQoMHg78/jQ4c4ELgiy++4NdffzWdqkRUKkQ81LZHHiEc2O/nx0UPPGA6zl9sNnjySQCG2+1EorsV3qbANWW1HrikXTuzYeQvtWvDTTcB8GzduliWxdSpUw2HKhmVChEPVWXmTAB2XX45fgEBhtP8wzXXQIsWBBUUMBKYP38+37nuqojnS3dtehUfFMR5551nOI0cx3VX8tqjR4kApk6dSl5entFIJaFSIeKBDqxYQTPXHgLnvvCC6Tj/ZrPBQw8BMDowkEpATEyM2UxSPJZFgGu6Kv3887Hb9TbgUTp0gObN8c/J4b7QUA4dOsTChQtNpyo2fTWJeKB9zz8PwKbISKI89UyGm26Cxo0Jy81lEM5dNhNdJ16KB9u/n8qpqeQBEdr0yvPYbHDffQCMstnwByZNmmQ2UwmoVIh4GssiynXQU0Z5HhxWUv7+8OCDADwSFAT5+bz++uuGQ8lpuTa92ga0ufxyo1HkJPr3h1q1iEhPpy8QFxfHTz/9ZDpVsahUiHiYxHnziMrJIQO4sLwPDyup22+HWrWolZPDLcCUKVNITU01nUpOIXvFCsC5SLNt27Zmw8iJBQXByJEAPO3aQ+SNN94wmajYVCpEPMyBV14B4Nvatanh2rLXYwUHOw8bAx4OCiI9PZ0pU6YYDiWnkuPa9Gpv7dpUq1bNbBg5uf/+F0JCaJyaSgecO2wmJSWZTnVaKhUinqSggOiNGwHI79PHcJhiGjYMQkJokpNDR+D1118nNzfXdCo5kawsQvfscX5bj5J6turVnXcCgafDwsjMzOTdd981HOr0VCpEPMivH35Ijfx8koDWDz9sOk7xREYWvfiNCwri999/Z86cOYZDyQlt3oyfw8EB4Bwt0vR8994LNhsd09I4D3jrrbc8/vFSlQoRD3L0zTcB+LZBAyJr1zacpgTuvReALrm5nA286RqHeBbHunWANr3yGueeCzfeCMDTQUH89ttvzJs3z3CoU1OpEPEQVk4OZ+/YAYD9ttsMpymhc8+F667DZlncb7ezceNGvv32W9Op5B8yli0DYLO/P82bNzecRorlkUcA6OUq7K+88gqWZZnNdAoqFSIeYv+HHxLmcHAQaOd6VNOruJ6tv8tuJwLdrfA4loX/pk0ApDVrhr+/v+FAUiwXXQTdu2O3LB7182Pjxo2sXLnSdKqTUqkQ8RBJrj3+46OiCIuIMBvmTHTqBM2bE5Sfz53AnDlzOHTokOlUUmjvXkLS0sgFqmo9hXdx3a24zbI4C88+a0elQsQTFBTQYNs257d79TIa5YzZbEXP1t8fHEx+Xp5XHt3ss1ybXm0FWl92mdksUjKXXAKdO+PncDDOZmPFihVs2LDBdKoTUqkQ8QB/LlpEtbw8UoCLXNMIXunWWyE8nPrZ2VwNvPPOO3q81EPkrl4NwDq06ZVXcm2ENwiIBp53beXvaVQqRDzA72+9BcCGatWo27Ch2TClUbky3HEHAKODgjh06BCxsbFmMwkA2a5Nr36pXp06deqYDSMld/nlcM01+FsWzwALFy5kh2thtydRqRAxzbKouXYtAJnXXGM4jBsMHw5A59xczsJ7thf2aRkZ2vTKF7hOLO4PXAQ8++yzRuOciEqFiGEZ69dTJyuLLKCpN099FDr3XOjcGZtlMcJu59tvv2Wja5dQMWTTJuwOB78B53TqZDqNnKkWLZxTjMB4YO7cucTHxxuN9E8qFSKG7Xv1VQDWVq5Mk1atDKdxkxEjAPhvQABB6PFS06y/b3p1ySVmw0jpPPMMBARwDXAV8LCH7byrUiFiWOjy5QD82aEDNpvNcBo36d4doqKokpNDX2D27Nl6vNSgTNfX2CY/Py666CLDaaRUGjUqmmJ8DYj76itWudbLeAKVChGDCvbt46zkZBzAWXffbTqO+/j7Ow8aA8ZWrkxeXp5OLzXF4cDftbvp0aZNCQwMNBxISu3xxyEykubASGDcuHEes8umSoWIQfsmTgRgs58fbbp2NZzGzQYPhoAAmmVk0Arn46WefhiST9q5k6CMDDKAqlddZTqNuEO1akWLNp8Gft2wgQULFpjN5KJSIWJQ3vz5ACScd57vbZtcqxa4jm8fHRLCwYMHme8ar5Sjb74BYAPQVpte+Y5Bg6BtW8KAl4DRo0eTnZ1tOpVKhYgx2dmc5XrMr7LrzdfnuBZs9s7LIxJ4++23zeapgHJd50R8A7Rv395sGHEfux0mTsSy2+kPNEpI4OWXXzadSqVCxJSkefMIcT3md/GQIabjlI127aBFCwLy8xlks7F69Wp++OEH06kqlALXIr49tWtTt25ds2HEvVq2xOZai/Ue8Nazz7J//36jkVQqRAw59N57AGyqXp1atWsbTlNGbDZwvejdX6kSNmDSpElmM1Uk+/cT8scf5APBHTuaTiNlYfx4rMaNaQC8lJ3N6NGjjcZRqRAxwbKo5joQKMvXNyPq3x/Cw6mdkUEX4KOPPiI9Pd10qophzRrAeYhYmyuvNJtFykZoKLZPPsHy8+NWwC82luWuR4hNUKkQMSDvu++olZFBDnD20KGm45StypVh4EAAHqhcmfT0dD755BPDoSqGgq+/BpzrKS7TIk3fdckl2FzHo78DpBmcYixxqYiJiSEmJoZhw4YxzPUcuoiUTOI77wCwNiCAVldcYThNOXC9VnTKyqIeMHHiRI95rt6X5cTFAbA9NJQmTZoYTiNl6tFHyW/ZkgigV1KSsRglKhVjx45lzJgxjBkzhsmTJwPQpUuXMgkm4sscixcD8NsFF+Dn52c4TTlo1gw6dMDucPBff3++//571rhuzUsZSU6mkuvpIsell2K368a0TwsIwH/mTJg8GZ54wliMYn+VpaSkEB8fT0pKStGPDRs2jLi4OBISEsoim4hvysiggWuFdni/fobDlKP//heAEYGB+OG8WyFlyHXy7c/A+dr0qmI45xwYOtS5QNqQElXXzZs3H1cgoqOjAY4rGiJyainz5xNoWewFLhkwwHSc8nPTTVC9OlUzM+kGfPrppzoPpAxZWk8hBhS7VERERJCcnEzLli2LfizONV9XWC5E5PQOf/wxAFsiI333UdITCQqCO+8E4KGICPLy8pg6darhUL4re+lSANb5+9O6dWvDaaSiKNUk2/jx45k8eTIREREn/PmcnBzS0tKO+xCp6MLWrwcgoyL
+69H1pMslqak0AiZPnkx+fr7ZTL4oPZ2g774DILVFC4KCggwHkorijEvF2LFj6devH0NP8Tjc+PHjCQ8PL/qIioo608uJ+ARr717qpKWRDzRw/au9Qjn7bOjSBZtl8b+QEH777TcWLVpkOpXvWbsWu8PBHuA/vr4PiniUMyoVsbGxNG7cmDFjxpzy140bN47U1NSij8TExDMKKeIrDn74IQAbbTYuueYaw2kMcS3YHGSzEYDOAykTrq25VwNXVIRHlsVjlLhUFK6jKLxDkZKSctKnP4KCgggLCzvuQ6Qiy/zsMwB2N2pESEiI4TSG9OgBdeoQmpnJjThfU37++WfTqXxKjms9xdc2mw4Rk3JVolIRHx9PfHw8LVu2JCEhgYSEBKZMmUJkZGRZ5RPxHfn51P7xRwD8rrvOcBiDAgLAdYDaw9WqAfCOazMwcYP0dAK2bwcgqXlz/WNOypXNKua2dikpKTRq1OiEj48Wd2e8tLQ0wsPDSU1N1Re6VDi5K1cSeNVVHAV+27KFC//2JFWFk5gIDRuCw0ET4FB4OL///juVK1c2ncz7ffkldO1KAjDpgQd48cUXTScSH1Dc9+8SP1JqWda/PkTk9A588AEAa4KCaN6ihdEsxkVFQffuAIwJCyM1NZWZM2caDuUjXOspVqH1FFL+tG+rSHlZuRKAIxdeqC2ToWjBZv+8PIJxLtjUP1JKL7dwPQXa9ErKn17ZRMpDRgb1fvsNgPBevQyH8RBXXw0NGxKclcWt/v5s27aNb7/91nQq73bsGP6u9RRHzjvvpHsIiZQVlQqRcpAVF0eAZfEr0LpvX9NxPIOfX9FmWA9VrQroPJBSW7MGu8PBXuDcivrIshilUiFSDg7OmAHAxsqVaaRt7f9y113g78/ZR45wITB79mz+/PNP06m81/LlgNZTiDkqFSLlIOCbbwBIrshPfJxIrVpw440APFqjBrm5uUybNs1wKO+V9+WXAMQBHTp0MBtGKiSVCpGylpJC3YMHAYh0vYHK37gWbN6QlkYozj0rCgoKzGbyRn/+ScD33wNwoGlT7R8kRqhUiJSxzC+/xA/YBbS96SbTcTxPx45wzjkE5OQwuFIl9u3bx5euf3FLCaxYAcAOoIXWU4ghKhUiZeywa/+FzWFhOlTvRGy2orsVD7g2v9KCzTPgOkIhDuikQ8TEEJUKkTIWtG4dAOmtWxtO4sFuvx2Cgqh35AgXA0uWLDnpmUJyYoXrKVba7VqkKcaoVIiUpT/+oK7raYbqvXsbDuPBIiOhXz8AnqlbF8uydB5ISSQkEJCYSB6Q1aYNVapUMZ1IKiiVCpEydGzRIsA5z31pz55Gs3g81xRIpz//JAKYNm0aWVlZRiN5DdfUx3qgvdZTiEEqFSJl6MjcuQBsrVqVOnXqGE7j4S65BC64AL/cXP4XEUFSUhJzXf//5NQsracQD6FSIVKGKm/YAEBm27aGk3iBvy3YHBkQADjPA5HTcDgocJ33sSYoiEsuucRwIKnIVCpEyspvv1EzJYUCoLa25i6eW2+FypWpfuQInfz82LhxI5s3bzadyrPFx+OfmkoaUOmKKwgMDDSdSCowlQqRMpL++ecAbAHad+tmNoy3CAuD224D4Pm6dQGYNGmSyUSeb8kSwDn1ceXVV5vNIhWeSoVIGTkaGwvAjurVqVmzpuE0XuSeewBo8/vvRAEzZswgOTnZbCYP5li8GIAlaD2FmKdSIVIWLIuwTZsAyLn0UsNhvEyzZnDVVdgcDp6sUYPs7Gzef/9906k809Gj2FzHxX9btSoXXHCB4UBS0alUiJSFhAQijx0jF6jv2n9BSmDUKAD6Z2YSjHMKxOFwmM3kiZYuxWZZ7ACad+2K3a6XdDFLX4EiZSBt/nwAvgUu074BJde9OzRsSHBGBoOCg/nll1+Icz02KX/jmvpYDHTt2tVsFhFUKkTKRPK8eQD8WKsW1apVM5zGC/n5wYgRADwUGgroPJB/cTgocJWKL4FrVF7FA6hUiLibZVF161YA8jp0MBzGi911F4SEUP/PP+kALFy4kP3795tO5Tk2b8YvKYlUILd1a2rUqGE6kYhKhYjb/fgjYVlZZAENb77ZdBrvFRkJAwYA8EyNGjgcDiZPnmw4lAdxPUq6DLhajyyLh1CpEHGzFNd6irXAZXrEr3RGjgSgw9Gj1AfeffddsrOzzWbyEI4vvgC0nkI8i0qFiJsVLtL8qW5dIiIijGbxes2bw5VXYnc4GFulCkeOHGH69OmmU5l3+DA2106jG6tWpXXr1oYDiTipVIi4U0EB1XbsAMBx5ZWGw/gI1+OlgwoKCAZefvllPV76+efYLItNwIVdu+Ln52c6kQigUiHiXtu3Uzk3lzTgP9qfwj169ICzziIkM5O7goPZuXMnS1zrCSos192wz4DrrrvOaBSRv1OpEHGj5E8/BeBr4LKOHY1m8Rl/e7z0kSpVAHjppZdMJjIrPb3oqPMFwNU670M8iEqFiBtlLFoEwC8NGlDF9QYobjBoEISEUPfIETrZ7axatarinl765ZfYcnPZBUS2b69HScWjqFSIuEteHtV//BEAm576cK/ISOe+FcBLtWsDzrUVFdJnnwEwH+jZq5fRKCL/pFIh4ibWxo0E5+fzJ9C0b1/TcXzPffeBzUaLAwc4D5g7dy779u0znap85ebicN0Nmw/07NnTZBqRf1GpEHGTwvUUq2022msnTfdr3BhuvBGAl+rWpaCggNdff91wqHK2ahX29HQOARnnn0/jxo1NJxI5jkqFiJtku55I2NeoEZUrVzacxkeNHg3ANUeOUBvnZljJyclmM5Un19THAuAGTX2IB1KpEHGH7Gyq79oFgL9W45eddu3g0kux5+XxdPXqZGRkMGXKFNOpykdBAdbfSkUvlQrxQCoVIm5grVtHoMPBAaB5796m4/i2Bx4AYGBmJlWA119/ndzcXLOZysPq1dgOHyYJ2BUVRYsWLUwnEvkXlQoRN0iKjQVgld1Ou0svNZzGx91wAzRpQlBmJmOqVOHgwYPMmDHDdKqyN3MmAJ8C3Xr1wmazmc0jcgIqFSJukPfVVwAknn02ISEhhtP4OLsdHnoIgHsti2AgJibGt7fuzs3Fci0Enome+hDPpVIhUlrp6VTfuxeAIJ0WWT7694cGDQg9doy7Q0LYuXMnn7redH3SV19hS07mAPBzrVpcfvnlphOJnJBKhUgpWd98g79lkQC0dD3yKGUsIAAefBCAR4OC8AeeffZZLMsym6usuKY+5gB9br5ZB4iJx1KpECmlo671FKv9/Gjbtq3hNBXIoEFQsyZVU1K4MyiIHTt2sHDhQtOp3C8jA2vBAsA59dG/f3+zeUROQaVCpJQcrsOdDjZpQlBQkOE0FUhIiHOXTeDZypWx46N3KxYtwpaZSQJwNDqaNm3amE4kclIqFSKlkZRE9cREACp37244TAU0YgRERlIzKYnbAwLYtGkTX7kWzfqM6dMBmAXc0r+/nvoQj6ZSIVIKjpUrsQM/Am2uv950nIqnSpWitRUTKlfGD3jiiSd8527FgQNYixcD8D
Ga+hDPp1IhUgqF+1N84++v29KmjBwJ1atTIyWFuwID2bhxo++srfjwQ2wFBawBgi68kKZNm5pOJHJKKhUipbFqFQB/NGtGQECA2SwVVWgojBkDwPhKlfAHHnvsMe/ft8KyYNo0AKYCt9xyi9k8IsWgUiFypg4fpvqhQwCEaerDrLvvhpo1qZaSwrDgYHbs2EGs6y6S11q9GvbsIQ2YZ7czYMAA04lETkulQuQMOZYvB2Ar0E6LNM2qXLlol83nAwMJBh5//HHy8/PN5iqNqVMB52Okna6/nrp165rNI1IMKhUiZ6hwPcWagABatmxpOI0wfDg0aEBYWhoPVarEzz//zIcffmg61ZlJTi7alnsqMGTIELN5RIpJpeI0EhMTmTZtGsOGDaNly5Y0a9aMq666iv79+/P2229z9OhR0xHFEL+vvwbg6IUX4u/vbziNEBwMzz4LwFiHg6o411ZkZGSYzXUmpk/Hlp3NduBw/fpcc801phOJFItKxUkcPnyYe+65h+bR0bw/eDBMmcLtW7cy+McfabZyJckzZ/LIyJHUqVOHG2+8kc2bN5uOLOXp11+pevQo+UA1He7kOfr3hwsuIDg7mwlhYRw8eJCXX37ZdKqScTjg9dcBmAIMHjJE23KL11Cp+AfLsnjllVdo2agRkW+9RUJ+PmuAycD/gPuAN4ElwEG7nTfz8vjls89o27Yt9957L+np6SbjSzkpcO2iuQlor0PEPIefH0yYAMBdmZk0wHmC6cGDB83mKolFi+CXX0gGPrbZuOuuu0wnEik2lYq/ycvLY8jgwfw2ejQ7s7J4CogEqFMHunaFBx5wfvTqBWefTYjDwTBgB/CRw8GHr79Os2bNWLdundFxSNlLds13rwsK4sILLzScRo5zzTVw1VX45efzXmQkGRkZPPHEE6ZTFd8rrwDwDnBFt27Ur1/fbB6RkrDKUWpqqgVYqamp5XnZYklOTra6XnGFNcP5dLhlgeW44ALLmjPHsvLz//0bHA7LWr3asvr0sSy73bLAOujnZ10NVlBQkDVnzpzyH4SUD4fDSgsLsyywHrv0UtNp5ES2by/6e3klWHa73dq2bZvpVKe3ebNlgZULVl2wvvzyS9OJRCzLKv77t+5UADk5Odx19dU8u3o1twAOPz94+21sW7dCnz7OW6r/ZLPB5ZfDnDmwfj2cey61Cwr4CngwJ4e+ffsSExPjO9sFy19276ZKWho5QE2tp/BMF1zg3LsC+CAsDLvDwYgRIzz/76PrLsUsoPoFF3D11VebzSNSQhW+VFiWxb0DB/Lcpk20BPIiIrCvWOF8QbIX83/PxRdDfDzccw8AzwCvAQ+NHctzzz1XRsnFlIJlywBYB1yuVfme66mnoFo1GqSlcW9AAGvXruXjjz82nerkEhOx5swB4FVgzJgxOjxMvE6FLxUxzzzDzXPm0BTIrl6dgK1bnXcgSqpSJXjjDecHzkWdHwFPPvYYEydOdGdkMSx53jwAvg0J4fzzzzecRk4qMhKefx6AZ/z8qAU8+OCDpKSkGI11Ui+8gC0/n1XAn1FR9O3b13QikRKr0KVi0eef0+CJJ7gCyAkOJnjFCmjYsHSf9J574JNPwN+f24CJwMgRI5g5c2bpA4t5BQVUWr8egPS2bbEX926WmDFoELRqRXB2Nh+EhvLHH3/w2GOPmU71b/v2Yb37LgBPAffff7/OkhGvVGFfEZOSkth+663cAhTY7QQtWgTNm7vnk996K8yZg2W3MxR4GBg4cCBr1qxxz+cXc7Zto1JWFmlA/V69TKeR0/Hzc2537efHtceO0QuYOHEiGzZsMJ3seE89hS0vjzhge9WqDB482HQikTNSYUvFy7ffzoPHjgHgePNN6NTJvRfo1QubayrkWeCW/Hz69OnDgQMH3HsdKVe5X3wBwErgKq2n8A4tWsDYsQC8FxxMmMPBHXfcQVZWltlchX76CeujjwB4BBg1ahShoaFmM4mcoQpZKhbPn0+fRYsIBJI6dCBg+PCyudCIEUVHMk8Dzjp0iD59+pCbm1s215Mylz5/PgCbIiI455xzzIaR4nvsMTj3XCKys5kUEsLPP//Mk08+aTqV0xNPYHM4+BzYV7Mmo0ePNp1I5IxVuFKRmprKrgEDaAFkhIQQOWeO8/HQsjJ+PPTuTQAw12bjp3XruP/++8vuelJ2srII27EDgIKOHbUy35sEB8O0aWCzcXNWFjcCL730Et9++63ZXBs3Oh9LBx7DebJqlSpVzGYSKYUKVyo++N//GOma9vCfOhVq1y7bC9rtzhez//yHKMviI2Di22/z+eefl+11xf2++YaAggISgWY33mg6jZRU+/bw4IMAfBQYSH2Hg4EDB3LM9XpQ7vLzYdgwZx4g6z//YejQoWayiLhJhSoV+3/9lRYffYQ/cOCyywjq3798LhwWBnPnQnAw3YAHgcGDB3P48OHyub64RdbChQDEAZ06dzYbRs7Ms8/CxRdTOTeXuYGB7Nm1i+HDh5vZFOvNN2HbNpKAB4Dnn39eT3yI16tQpWLenXdyhWWRa7dTp7w3wbnwQueLCM6Fm/WPHGHQoEGev8OfFMletAiAnfXqUadOHcNp5IwEBMDMmRAWxsW5uTxts/HJJ5/w3nvvlW+O/fuxXI+2jgXObteOm266qXwziJSBClMqtm3aRNeVKwE4OnAgttLuR3EmBg2CPn0IwHn64LIvvmDy5Mnln0NK7sgRqu7bB0CgTiX1btHR4Pp797Bl0QcYOXIk3333Xflc37LgnnuwZWSwBvg4IICpU6dqjY74hApTKtYOHMi5QGpQEHVef91MCJsNJk6EmjVpZlk8CTzwwAPs37/fTB4pvuXLAdgOtL3+erNZpPRuvhlcC6Y/tttpnp3N9ddfXz5TkpMmweefkwf8F3jiqac477zzyv66IuWgRKUiJSWFKVOm0KVLl7LKUybWf/klN//0EwB5jz3mXONgSvXqRf9KGgOcn5Fhbk5Xii39s88AWG6zccUVVxhOI24REwPduxPkcPCFnx8F+/bRo0cPMjIyyu6aGzdi3XsvAA8Bwa1a8aBr8aiILyh2qYiPj2fOnDmkpKSQlJRUlpncbte991INOBgRQXXXJjhG9ewJAwbgB3wAxC1ezOzZs81mkpOzLGxxcQD81qQJYSZLqbiPnx/MmAEXXECNggJW2O38tmkT/fv3p6CgwP3XO3oU+vTBlpfHp8Cb/v588MEH+Pv7u/9aIoYUu1S0bNmSoUOHEh0dXZZ53O67b7+l688/A2B79FHwlL/Ar78OtWrRBOfTIKNGjeLo0aOmU8mJ7N5NaFISOUCkjjr3LVWqwMKFcNZZnO1wsNpmY/Pnn3PnnXeSn5/vvutkZUHfvrB/P7uBu4C3J07UgXTic3x+TcXWu++mJnC4cmVq/+9/puP8pWpVePVVAB6z2Qg7ckS3QT2UY+lSANYCV2iRpu9p0ABWrYKzzuI/lsUqYPXHH9OvXz9ycnJK//mzspx3J1es4BhwEzBk9GiGDBlS+s8t4mF8ulQk7NzJVfHxAOTce6/n3KUodPPN0KULQ
[base64-encoded PNG data omitted: added figure output of the plt.plot cell below, drawing solution_model in black ('k') and ref_data in red ('r')]",
+ "text/plain": [
+ "
" + ] + }, + "metadata": {}, + "output_type": "display_data" } ], "source": [ - "system.text_form" + "plt.plot(solution_model, color = 'k')\n", + "plt.plot(ref_data, color = 'r')\n", + "plt.show()" ] }, { @@ -1598,7 +5059,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 88, "id": "d3da228c", "metadata": {}, "outputs": [ @@ -1606,8 +5067,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "-3.951218144289313 * u{power: 1.0} + 0.0 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 1.9999995807630981, dim: 0.0} + 0.0 * u{power: 1.0} * cos{power: 1.0, freq: 2.000000378681952, dim: 0.0} + -0.9900591677672463 * d^2u/dx0^2{power: 1.0} + 1.4766602719611732 * t{power: 1.0, dim: 0.0} + 0.014961528346664596 = du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.9999995886960809, dim: 0.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004122473023559842}}\n" + "0.0 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -4.0 * u{power: 1.0} + -1.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 1.999999954115913, dim: 0.0} + 1.5 * t{power: 1.0, dim: 0.0} + 0.0 * t{power: 1.0, dim: 0.0} * u{power: 1.0} + -0.0 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.005904759517055435}}\n" ] } ], @@ -1621,7 +5082,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 89, "id": "5ee77966", "metadata": {}, "outputs": [], @@ -1634,58 +5095,52 @@ " return bop\n", "\n", "bop_u = get_ode_bop('u', 0, [None], t_test[0], x_test[0])\n", - "# Set derivative with central finite difference \n", + "\n", + "# Set derivative with central finite difference\n", "bop_du = get_ode_bop('dudt', 0, [0,], t_test[0], (x_test[1] - x_train[-1])/(2*(t_test[1] - t_test[0]))) " ] }, { "cell_type": "code", - "execution_count": 14, - "id": "e831e78d", + "execution_count": 90, + "id": "360dc0a7", "metadata": {}, + "outputs": [], + "source": [ + "t_test.shape\n", + "t_test_oversampled = np.linspace(t_test[0], t_test[-1], 160)" + ] + }, + { + "cell_type": "code", + "execution_count": 91, + "id": "e831e78d", + "metadata": { + "scrolled": true + }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Using explicitly sent system of equations.\n", - "dimensionality is 1\n", - "grid.shape is (160,)\n", - "Shape of the grid for solver torch.Size([160, 1])\n", - "Grid is torch.Size([160, 1])\n", - "torch.Size([1])\n", - "[2023-11-17 14:22:25.884183] initial (min) loss is 158.1117401123047\n", - "[2023-11-17 14:22:26.021060] Print every 5000 step\n", - "Step = 0 loss = 158.111740 normalized loss line= -0.000000x+1.000000. There was 1 stop dings already.\n", - "[2023-11-17 14:23:14.018034] No improvement in 100 steps\n", - "Step = 1003 loss = 0.000193 normalized loss line= -1.568164x+154.301719. There was 1 stop dings already.\n", - "[2023-11-17 14:23:18.626122] No improvement in 100 steps\n", - "Step = 1105 loss = 0.001660 normalized loss line= -0.034247x+7.008235. There was 2 stop dings already.\n", - "[2023-11-17 14:23:23.117335] No improvement in 100 steps\n", - "Step = 1205 loss = 0.035738 normalized loss line= -0.001361x+0.462532. 
There was 3 stop dings already.\n", - "[2023-11-17 14:23:29.748140] No improvement in 100 steps\n", - "Step = 1349 loss = 0.000676 normalized loss line= 0.113782x+16.080034. There was 4 stop dings already.\n", - "[2023-11-17 14:23:38.369849] No improvement in 100 steps\n", - "Step = 1533 loss = 0.012405 normalized loss line= 0.013265x+0.129202. There was 5 stop dings already.\n", - "[2023-11-17 14:23:42.897361] No improvement in 100 steps\n", - "Step = 1633 loss = 0.000953 normalized loss line= -0.059929x+12.522327. There was 6 stop dings already.\n", - "[2023-11-17 14:23:47.428764] No improvement in 100 steps\n", - "Step = 1733 loss = 0.000575 normalized loss line= -0.029961x+31.163667. There was 7 stop dings already.\n", - "[2023-11-17 14:23:55.428386] No improvement in 100 steps\n", - "Step = 1908 loss = 0.015817 normalized loss line= -0.008667x+1.119618. There was 8 stop dings already.\n", - "[2023-11-17 14:23:59.988453] No improvement in 100 steps\n", - "Step = 2008 loss = 0.002209 normalized loss line= 0.027394x+6.372215. There was 9 stop dings already.\n", - "[2023-11-17 14:24:04.566692] No improvement in 100 steps\n", - "Step = 2108 loss = 0.013859 normalized loss line= 0.002540x+0.507256. There was 10 stop dings already.\n", - "[2023-11-17 14:24:09.110130] No improvement in 100 steps\n", - "Step = 2208 loss = 0.007771 normalized loss line= 0.013770x+1.697541. There was 11 stop dings already.\n" + "grid.shape is (160,)\n" ] } ], "source": [ "pred_u = epde_search_obj.predict(system=eq_1, boundary_conditions = [bop_u(), bop_du()],\n", - " grid = [t_test,], strategy='autograd', solver_kwargs = {'use_cache':False, 'step_plot_save':False})\n", - "pred_u = pred_u.reshape(-1)" + " grid = [t_test,], mode='NN')" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "id": "7ecae38a", + "metadata": {}, + "outputs": [], + "source": [ + "pred_u = pred_u[1].reshape(-1)" ] }, { @@ -1698,20 +5153,31 @@ }, { "cell_type": "code", - "execution_count": 17, - "id": "01bcd998", + "execution_count": 94, + "id": "2b48c435", + "metadata": {}, + "outputs": [], + "source": [ + "if isinstance(pred_u, torch.Tensor):\n", + " pred_u = pred_u.detach().numpy()" + ] + }, + { + "cell_type": "code", + "execution_count": 95, + "id": "e6d8985b", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "MAPE on the test dataset is 0.006158279444752504\n" + "MAPE on the test dataset is 0.19373547972726995\n" ] }, { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAioAAAGyCAYAAADK7e8AAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAABwP0lEQVR4nO3de3xT9f348ddJW8pFSlpKZQ6Upt7YnEpSvN8GrajTbY4WHJ3KFFvdnG5za2VzU3f5Yqu7b7/RIhMBEWi9zDmdtog31EoacRedStNWvM1CG8q11/P745Ok95K2Sc/Jyfv5ePA4aa7vTxKSdz6X90fTdV1HCCGEEMKEbEYHIIQQQggxGElUhBBCCGFakqgIIYQQwrQkURFCCCGEaUmiIoQQQgjTkkRFCCGEEKYliYoQQgghTEsSFSGEEEKYVrzRAYxGV1cXH330EZMnT0bTNKPDEUIIIUQIdF1n3759HHPMMdhsQ/eZRHWi8tFHHzFz5kyjwxBCCCHECOzatYsZM2YMeZ2oTlQmT54MqIYmJSWF9b7b29t59tlnufjii0lISAjrfZuB1dsH0karsHobrd4+kDZaQbjb19LSwsyZM4Pf40OJ6kQlMNyTlJQUkURl4sSJJCUlWfZNZ+X2gbTRKqzeRqu3D6SNVhCp9oUybUMm0wohhBDCtCRREUIIIYRpSaIihBBCCNOSREUIIYQQphXVk2mHQ9d1Ojs76ejoCOn67e3txMfHc/jwYTo7OyMc3dizevtA2mgVVm6jFSddChFulk9UdF3H5/PR2Ng4rA85XdeZPn06u3btsmQxOau3D6SNVmH1NoayPFOIWGb5ROWTTz7B5/MFlzDHx8eH9GHX1dXF/v37Oeqoo45YNS8aWb19IG20Cqu2Udd1Dh48yP/+9z9JVoQYgqUTlc7OTvbu3cu0adNITU0d1m27urpoa2tj/PjxlvpwDLB6+0DaaBVWbuOECRPo6uriwIEDdHZ2ylCQEAOw1v/6Ptrb29F1nUmTJhkdihBCDGjixInYbLaQ588JEWssnagEWHFcWwhhDYHPJ13XDY5EiAG43TBvHlpNjWEhxESiIoQQQogRWLsWtm5Fe+ghw0KQRMUiioqK0DSNqqoqo0OJGJfLRUFBwZg/bnZ2NkVFRaO+H6PiH0hVVRUZGRlomjbstmVnZ1NSUhKhyMzDTK+XEGOqoQFqasDjgU2bALBt2sSU2lp1XkPDmIZj6cm0saSsrAyHw0F5eTlZWVlGhxMRy5cvx263Gx3GiJklfp/PR25uLlu2bMHpdOLz+YwOyZTM8noJMeZmzeo+HZg6sXs3F912W/f5YzhUKT0qFuDxeEhJSaGoqIjNmzeP6D4Cv7DNYLBYcnJyoiIJM3v8VVVVpKSk4HQ6AQb9MjbTeyKSzP56CTHm1q+HeH8/hj8h0fxHPT5eXT6GJFGxgNLSUrKyssjKysLn81l6+EcIIUSE5eVBdfWAF3Vs26YuH0OSqFjA5s2byc3NxeFw4HA4KC0t7XedvvMsPB4PycnJAOTm5pKdnY3X60XTNDRN6zUcUFRUREZGBsnJyf3G7HNzcykpKaGgoIDk5GQyMjKoqqrqNQciNze3120qKipwuVxomkZGRgYVFRW97m+wWAaaK1JSUhJ8HJfLNWSSVlRURHJy8oDXHaqNoTyXgZUbw41/JM9tKAa736KiInJzc4PxDdbWI70n9uzZM2RcPS8rKysLKeahbrN06VKmTp2Ky+WirKys10q+oV4PCO/7LVKvlxBm1elPE3QDaxjFZKKi63DggHn+jWaor6qqCp/PF+yizsnJ6fVBHIry8nLKy8txOBzouo6u68HhgNzcXDweD5WVldTV1dHU1ER2dnbwtj6fL/jlV1dXh9PpJDc3l9LSUmpqaqipqaGioqLXF09TUxOrVq1C13VKS0uDj3GkWPoqKChg06ZNlJeX09zcTHFx8aDzLaqqqqioqKCurg5d1ykuLiYlJSWkNobruexrJM9tKJM7h7rf4uLiXvENlNSG0o6SkpJB4wokQnV1dVRWVlJUVBR8fYeKebDbXHzxxbz55ptUVlayZcuWQWMeTLjeb5F6vYQwozc+TONjplODi7duWYk+Zw6H7XaYNm3sg9Gj2N69e3VA37t374CXHzp0SH/rrbf0Q4cO9Tp//35dV+mBOf7t3z/y5yAnJ0fPysoK/l1TU6MDenl5ea/rZWVl6YWFhcG/t2/frgN6Z2enruu6Xl5erjscjl63CdxXc3Nzr/PtdrteWVkZvF+n0xm8rLKyUgeCl+u6rjudzl6P3ZfD4dCLi4uDfw8US982NDc364BeW1s76P12dnbqzc3Nemdnp15eXq7b7fZ+1wm1jT3j7/t34D6GE/9ontvB2hjq/Q4WX19DtWOwuGpra/s9fmlp6ZCv/1C3CVz2/PPP93qv9nwejvR69DWS91u4Xq+BHDhwQHe73XpLS8sRrxut2tra9Mcff1xva2szOpSIsVIb29p0/dRTdX0ch/Vv5HWp81pb9SfKy8PWviN9f/cUkz0qVlJRUdFraMXpdGK324f9q3Mgbrcbh8PR7xdmZmYmlZWVvf4OCPRS9DzP4XD06+koKysjNzcXl8uF1+sddmxVVVXY7XYcDkdI18/KyiIlJQVN08jOzg72OoXaxnAbzXMbjvsdrcHiCvRUpKenk5ycTHJy8hF7VIa6jcfjwW63c9pppwWvH+pr3tNo32+Rer2EMKNf/Qr++U+YPDWRX//GP4yqaXQZtMVDTC5PnjgR9u8f+jpdXV20tLSQlJQU8f1FJk4c2e0CX7ZFRUW9xtIDE2p9Pt+olleGumx1oMcY6nFdLldwlVJWVhYul2tkAQ6D3W6ntraWsrIyKisryc3Npbi4OOKPO5jRPLfhuN/RGioup9NJzTCrWA52m+EOYw4kHO+3SL1eQphNfT3cfbc6/etfGzPS01dM9qhoGkyaZJ5/I63wX1paSk5ODs3Nzb3+BT7wh1qq3NTUdMT7z8rKwuv19vuQdrvdzJ07d0Qxe73e4Dj/aJZ+Bup/DPfXcX5+PuXl5ZSWlrJp06awtDGU57KvSDy3kbzfUDmdTjwez7ASpqFuE+iNq6+vD553pOe75+Xher8Z/bwKMVZWrIDDh+Gii+Dqq42ORonJRMUKAr0mA03WczqdOJ3OXsM/Docj2MXu9XpZvnx5r9s4HI7gB3FVVRVerxen00lWVhbz588PXhZYXZSTkzOiuAPd4YHJtRUVFf2GBQaKpS+Hw0F+fn5wEqbP56OiomLQKqsVFRWUlJTg8/nw+XxUVlbicDhG1Ma+z2Xfxwwl/kg8t+G+31DaMdBter4u0P3cj+Q2gffy0qVLg7EM9HwP9nqE6/0WqddLCDN5/3144AF1+uc/H/mP6HCTRCVKbd68GYfDMeivxIKCgl6/UgsKCnC73cFllTfccAOzelQfDHwhpKen9xoSCfwSdblcpKenk5KSMuxu/Z7sdjuFhYXBJZyB++/ZZT5YLH0F6sdkZ2eTnJxMaWkpixcvHvC6DoeDysrK4DwIn8/HqlWrRtTGvs
9lQUFBr3kTocYf7uc23Pcbajv6Ki0txel04nK5gq/LkXozhrpNZWUlycnJzJ07l9zc3H6v8VCvRzjfb5F6vYQwi5ISaG9XvSnnnWd0NN00XY/eLTtbWlqYMmUKe/fuJSkpqd/lhw8fpq6ujvT0dMaPHz+s+x7LOSpGsHr7QNpoFX3b6PF4cLlcltlt+ODBg7z99tuceOKJTJ482ehwIqK9vZ2nnnqKyy67jASDJmRGWrS38aOPwOGA1lZ47jn44hd7Xx7u9h3p+7sna36yCSGEECJk996rkpRzz1U9KmYiiYoQQggRw/bsgcCUxp/8xDxzUwIkURFCCCFikdsN8+bxzC/dHDoEp50GF19sdFD9SaIihIgqTqfTMvNThDDU2rWwdSsdD6wD4MYbzdebAjFa8E0IIYSISQ0NsHu3ykg2bQJggW8j54y/lm/M1qEhFY47zuAge5NERQghhIgVPcpSBLpPptHItsMuuMh/vsl6LGXoRwghhIgV69dDvL+Pwp+Q2PAnJvHx6nKTkR4VIYQQIlbk5cHs2TDQnlfV1eB0jn1MRyA9KkIIIUQM6vKnALpm7lTA3NEJIYQQIrzS0mifOh03Lr6TsJKuOS6YPh3S0oyObECSqAghhBCxZMYMfnJ1PWdSzadXFhDnrob6epgxw+jIBiRzVIQQQogY0tUFGx5JBOCqq1CrfxITjQ1qCNKjEqVKSkrQNI3k5GSSk5PRNI2MjAyKioqCOyaHoqqqioyMDDRNo6ioKHIBD5PL5aKgoCDk62dnZw87fo/Hg2bG6kZCCBFBr7wCu3bB5Mlw6aVGR3NkkqiMhr/8MG63IQ9vt9tpbm6mubkZXdeprKzE6/XicrlCSlb27t3L4sWLKS8vR9d1li9fHvmgQ7R8+XJyc3ONDkMIISxn40Z1vPJKGD/e2FhCYXiiUlZWRklJCWVlZcPuDTCcv/ww69YZHQkADoeD8vJympqa2Lx58xGv//zzz5OSkoLTvxzNbrdHOML+Aj06feXk5JCVlTXm8QxlsFiFECJadHRAebk6fdVVxsYSKkPnqJSUlJCfnx/8gvT5fNxwww2UB55FMxqg/DAbN8K116riOanmKz8shBBCADz/PHz6KUydCib7LTgoQ3tUKisre/2Kt9vt5u9RmTULMjNVsZzGRnVeY6P6OzOzd3niMeb1esnNzSUlJYX8/Pzg+QUFBSQnJ5ORkUFZWRkAt99+O0uXLsXr9aJpWq/5IANdHyA3N5eysjLKysrIyMigqqoqpNuUlJT0ujxwu9zcXLKzs4MxaJoWfP37zjmpqKjA5XIF5+JUVFQM+/nx+XxkZ2ejaRoul6tX/Ed6jKFiDUdsQggxFgLDPjk5kJBgbCyhMjRRsdvtZGdnBz/wvV4vDofDyJCObIDyw8HjGJcf9vl8wS/NwJekw+GgpqYmeJ3c3Fy8Xi91dXVUVlZSVFSEx+PhnnvuYc2aNTgcDnRdp7S0dMjrBx6vtLSU4uJiiouLg0MzR7pNUVERubm51NXV4XQ6g0lReXk55eXlwRh0XR90+KmpqYlVq1YFY83NzQ0+Rqhyc3NpamqitraWLVu2sH379pAfY6hYwxGbEEJEWmcnPP64Or1okaGhDIuhQz+rVq3C5XKRnJxMYWEhGRkZwS/MgbS2ttLa2hr8u6WlBYD29nba29v7Xb+9vR1d1+nq6qKrq2tYsQW2kQ/cPujrX4eTTsI2d26/23S9+qoqPzzMxxqJwBflnj17ALWCZe7cuRQVFZGUlERXVxder5eKigr27NlDUlISSUlJrFixgo0bN3Laaad1x+2Pd6jrn3766ei6jtfrpba2FrvdfsTHCNzG6XQyb948AG644QYWLFgQfMy+x57t6/ncL1u2LHi9efPm4XA4qKys5PTTTx/w+oHzAsedO3dSVVXFe++9xyx/r1dRUREVFRUhP8ZgsR7pdpE06PvUQqzexkD7Ojo6Bvwcs4JAu6zaPoiONm7bprFnTzzJyTpnndXBcEINd/uGcz+GJip2u52ioiIqKyspKSkhKyuLRYsWDfqresWKFdx99939zn/22WeZOHFiv/Pj4+OZPn06+/fvp62tbUQx7tu3r995cQcOMBnQNQ1N14PHAwcO0OlPniLt8OHD6LoeTNaOP/54LrroIr7//e/z29/+FoBt27YB9OulmjNnTrBdXV1dwfsY6votLS10dnZy4YUXYrPZhnWbL3zhC8HrJ/rX6gf+PnToUK8YAjo7O2ltbe11/po1a3j++eepr6/H6/Vy+PDh4OUDXT9g3759bNu2jSlTppCamhq8zoEDB3rFcqTHGCzWI91uLAz0PrUaq7Yx8Nn0yiuv0NHRYXA0kVVZWWl0CBFn5jauWfM54AROPfUDKitH1usbrvYdPHgw5OsamqgUFRWRnZ1NeXl5cH6Fy+WitrZ2wOsvX76c73//+8G/W1pamDlzJhdffDFJSUn9rn/48GF27drFUUcdxfhhrsHSdZ19+/YxefLk/rU20tPRjz4aZs6k67rr0P7yF/Rdu5iUng4DxBEJ48ePR9O0Xu2+9957mTt3LnfccQcOh4MJEybgdDr7DXFA9684m80WvI+hrg8QFxfHiSee2OsxQ7lNWlpa8DaTJk0C6PWYPWPoebvExMTg+XPnzg32vGVlZTF37lzGjx8fvLzv9QNtDLyGEyZM6Pd89Y3lSI8xWKxHul0kDfk+tQirt/HQoUMAnHPOORx11FEGRxMZ7e3tVFZWkp2dTUK0TIwYpmhoY2Gh+srPz/8Ml1122bBuG+72DeeHnGGJitfrxefzBec5BOZWuFwuKioqyMnJ6XebxMTE4C/ynhISEgZ84jo7O9E0DZvNhs02vOk4gS7mwO17OfZYtfpn3Dj1wXnjjdDWhjaGlf0CH9g9Y8vMzCQrK4vly5dTXl5OZmYmHo+HlpaWfr1UPbvQA/cx1PUDj9n3+RjubY50HOh2Xq8Xj8cTTK56Xidwu4Fi6/kaHn/88fh8Purr64O9Pz0fO5THGCjWUG4XSUO+Ty3C6m0M/F+Oj4837RdcuAz2WW0lZm3jO+/Au++qCbRf+lL8iCfShqt9w7kPw/7Xe73eAb/YhlON1FCJiWqJMpiq/HBxcTEVFRV4PB4cDgf5+fnBya6gVqiUlJQMeNvhXn+kt+l7+0DSWlVVFbyPnlJSUgCCq4kC7RsOp9OJ0+kkNzcXn8+H1+vlhhtuGNZjDBRrOGITQohI+9vf1PGii2DKFENDGTbDEpWsrCw8Hk+/5cg1NTUD9qaI0DidTrKysoJLe0tLS3E6ncFJy6WlpUMWUhvu9Ud6m57xOp1O0tPTKS4uHvA6drudwsLC4BLnyspKsrKyhl2gbsuWLaSkpJCcnExBQQEFBQXB3pVQHmOgWMMVmxBCRNITT6jjl79sbBwjoel9+6zHkM/nY8WKFUydOjVYQ6VnAbgjaWlpYcqUKezdu3fQOSp1dXWkp6cPe45KYNJkUlKSJbubrd4+kDZahdXbePDgQd5++21OPPFEJk+ebHQ4EdHe3s5TTz3FZZddZsphkXAwcxt374ajj
1YLUhsa1OyF4Qp3+470/d2T4at+BvsVLYQQQojRe+oplaScfvrIkhSjWe/niRBCCCGCnnxSHa+4wtg4RkoSFSGEEMKiOjshsFvIpZcaG8tISaIihBBCWNT27dDcDHY7DFBQPSpIoiKEEEJY1DPPqGNWVvc2ddEmJhIVAxc2CSHEkAKfT1asuisM5nbzpV/Pw4WbBQuMDmbkLJ2oJCQkoGlacE8XIYQwm4MHD9LV1UV8tP7cFaZ1eNVaMlu2cjXrojpRsfT/jLi4OKZMmUJjYyOtra0kJSURHx8f0i+Xrq4u2traOHz4sCVrN1i9fSBttAqrtlHXdQ4ePEhjYyP79u0jLi7O6JCEFTQ0qMIpmgYbNwHwjbiNTG28Fj7VITUVjjvO4CCHx9KJCsD06dOZMGECn3766bA2QdJ1nUOHDgU3s7Maq7cPpI1WYfU2JiUl8d577xkdhrCKWbOCJxNR/19SOhvB5eq+TpRNh7B8oqJpGna7nSlTptDZ2RnyNurt7e28+OKLXHDBBaarMhgOVm8fSButwsptTEhI6LVBqBCjtn49LF0KHR1o+Oc/+Y/Ex8OaNYaFNlKWT1QCNE0jPj4+5HHguLg4Ojo6GD9+vOU+HMH67QNpo1VYvY2SqIiwysuD2bN796AEVFeD0zn2MY2SdQZ8hRBCCBHUGfiKj/K5XdEdvRBCCCF6S0ujOXE6Nbh45sqVqndl+nRISzM6shGJmaEfIYQQIhZ0TJ/BiePq2d06ju0/0sCVD21tkJhodGgjIomKEEIIYSFvvAG79yUyZQrMmYNaqhylSQrI0I8QQghhKc89p44XXQRWKM8jiYoQQghhIYFE5YtfNDaOcJFERQghhLCItjZ46SV1et48Y2MJF0lUhBBCCIuoroZDh2DaNPj8542OJjwkURFCCCEsouewT5SXTwmySDOEEEIIsXWrOlpl2AckURFCCCEs4eBBePVVdVoSFSGEEEKYyiuvqMm0n/0sHH+80dGEjyQqQgghhAW8+KI6fvGLqsabVUiiIoQQQljACy+o4wUXGBtHuEmiIoQQQkS5w4fV0mSQREUIIYQQJrN9O7S2wtFHw4knGh1NeEmiIoQQQkS5nsM+VpqfApKoCCGEEFEvMJHWasM+IImKEEIIEdXa29XSZJBERQghRDi43aoil9ttdCTCAt54Aw4cgORkOOUUo6MJP0lUhBBirK1dq2qdr1tndCTCAgLzU84/3zr7+/QUb3QAQggRExoaYPduNdNx0yZ13saNkJfHlJ071eVWKicqxoyV56eAJCpCCDE2Zs0KntTR0AC9sZGEM8/kIoAf/AB03ZjYRNTq7ISXXlKnrZqoWLCTSAghDNZnDorXC2UXrqfd/9tQQyUkmj8x6bTF0f7AGkNCFdHtP/+BvXth0iSYM8foaCJDEhUhhAg3/xyUtr+s44YbVAGughfyOJPqAa8+t+t1Ln7wanbtGuM4RdTbtk0dzzoL4i06RiKJihBChENDA9TUgMcTnIOyf9VGau73cFpnDddc2MCaNf7r+mc86po6jkvo4PnnbZx6KpSXGxC7iFqBZcnnnmtsHJEkiYoQQoTDrFmQmQkuF3pjIwD2jkY8uKghkwdfmMWp89Ng+nRwuWDlSrRMF/rRR/Otu2rJzOzC54NFi6CsDFnCLEIS6FE55xxj44gkSVSEECIc1q8P9r0H5p7Y/HNRiI9Xl8+YAfX1ave4ggKorqZj506mfH4iL7zQya23qqvfeCO8c4csYRZD+/hjqKtTC8nOOsvoaCLHoiNaQggxxvLy0E+ejZbp6n9ZdTU4nep0YmL3+ZoW/DshAX7z3Qamf7ibzRUa9md6LGG+9lq1Iig1FY47LsINEdEiMOxzyikwZYqxsUSSoYlKbm4uixcvxuFwYLfbe13mcDiMCUoIIUZo40b4OtCJjTi61FyUrq6Qb6+lz+J24HagC7WznP5pI5qrR/IjS5gFgNvNqd8rxEUJc8/NNDqaiDJ06Mfj8ZCbm4vL5SIjIyP4r6ioyMiwhBBi2Dwe+PHv0viY6ew+Vs1BweVSc1LS0kK7kx7DR4FhI63v8JEQAGvXcsKurVzNOkvPTwGDE5WCggJ0Xe/1r7S0lHKZ9i6EiCJtbbBkCdS1z+CWK+pJq+ueg0J9vZqbEoq8PHWbgVRXq8tF7OqxskzfqIYGr2IjX5ziUec3NBgcYGQYOvSTk5PT6++qqioyMwfvwmptbaW1tTX4d0tLCwDt7e20t7eHNbbA/YX7fs3C6u0DaaNVREMb//hHG++8E0dams4fV9no6OyATv+FNpva3nYQ/drX0UECoNtsaF1dwWGkrVs7OO8L5n0OhhINr+FojUUbE3pUN8Y/NDiNRmxf6R4abG9ri8hjh7t9w7kfTdfNMeDp9XqpqqoiPz9/0Ovcdddd3H333f3O37BhAxMnToxkeEIIMaB9+xK46aYs9u8fx7e+tYOLLx7dr9rxu3dz4Q9+wKHUVBqys5m44WUm720kK+kl7lj5DhMndoQpchFtZrzwAnN+/3tsnZ39LuuKi+ONW27hgwsvNCCy4Tt48CBLlixh7969JCUlDXld0yQqBQUFlJaWDnmdgXpUZs6cye7du4/Y0OFqb2+nsrKS7OxsEhISwnrfZmD19oG00SrM3sbbbrPxhz/EccopOtu3dxAXN7zbD9i+1lYYNw40jdbDOmfO6eKt2vHcemsn994b+uRcszD7axgOY9bGN94g4cwz+z9+dXVEa+iHu30tLS2kpqaGlKiYYnmyx+MJ6XqJiYkk9lza55eQkBCxN0Yk79sMrN4+kDZahRnb+O678Oc/q9O/+Y3G+PEjj69X+3q0MyEBfv0nuOQS+OMf41i2LI5TThlN1MYx42sYbhFvo3+ydWBIUNdsaHoXCfHxvd43kRKu9g3nPkxR8K20tJSMjAyjwxBCiGH58Y+howO+9CXIyorc4yxYAFdeqXbKvflmWaEc09LS6EidTg0ubo5fie4c5sqyKGSKRKWqqqpfHRUhhDCzd9+FRx5Rp4uLI/94v/kNTJgAL7yg6rWIGDVjBg/9sp4zqebNswqwbR/myrIoZIpExev1SoE3IURU+dWvVM/GFVfA5z8f+cc77jj40Y/U6R/9SC2JFrHppdcTAU3VT+lR3diqTJGoOBwOUlJSjA5DCCGOzO2m9bx5/OsBtVlgYeHYPfT3v696+evr4f77x+5xhbnEwo7JPZkiUamtrcUZ2AdDCCHMbO1aErdtZXH7Os46a2y/LCZOhDvuUKd/8Qs4eHDsHluYQ1MTvP22On322cbGMlZMkagIIYSp9agI2tWjIugvF3rQPGNbEfSGG9Qw0DEfu2k8dR643WP22MJ4r76qjieeCNOmGRvLWDHF8mQhhDC1HhVBNX9F0DQaOfqHY79Z4LhxcOedsO+6tRxXu5XW1etIHKKit7CWbdvU0er7+/QkPSpCCHEkPTYL1IzcLNDfs3P15z3kxamenY51G9WOiBbe60V0i7X5KSA9KkIIcWR5eTB7ttoNua/qahirOXb+np14IMXfszPhQGPvuKTIimW1t8Pr
r6vT0qMihBBiQJ2Bj02bAR+fA/Ts2Izo2RGG2LEDDh2C5GQ4+WSjoxk7kqgIIUQIWsan8QmqImjtD1eqXoyxrgial6d6cAbQsa1aXS4sKzDsc/bZxuTJRpGhHyGECMHDL83gFurJOHkc/ynWgHxVdc2oYls2G3R1Bfd8qayES88wJhQxNgITaWNpfgpIj4oQQoTk/vuhjUSW3aChaRhXETQtTfXkuFywciX/+6yLj5nOr9enyfQUC9P12FzxA9KjIoQQR7RjhypXkpAAV19tcDAzZqjStOPGgaYxITef449to+m/iTzzjNplWVjP++/DRx9BXBycEWM9Z9KjIoQQR7BunTp+5SsmKbKVmKh6dIDkFI1rblA9O7//vZFBiUgKzE+ZM0dVKI4lkqgIIcQQurpgkypZwje+YWwsg7n5ZpW3PP202tVZWE+szk8BSVSEEGJI27bBhx/ClCnmHVbJyIDLL1en//hHY2MRkRHoUYm1+SkgiYoQQgzp4YfV8corjVvgE4pbblHHBx6AlhZjYxHhtW8fvPmmOi2JihBCiKCODigvV6evusrYWI5k/nxVPHf/flizxuhoRDi9/roagjz2WDWXOtZIoiKEEIN47jnYvRtSU2HePKOjGZqmdfeq/OEP6otNWEMsz08BSVSEEGJQGzeqY06OWppsdldfDXY77NwJzz5rdDQiXGJ5fgpIoiKEEANqbYVHH1Wnv/51Y2MJ1aRJcO216nRpqbGxiPDo7IRXX1WnpUdFCCFEUGUl7N0LxxwD551ndDShKyhQx7/9DT74wNhYxOi99ZaaHD1pEnzhC0ZHYwxJVIQQYgCPP66OX/tadG0AN3s2XHCB+iW+erXR0YjRCsxPOeus4MbZMSeK/vsJIcTY6OyEJ55Qp7/yFWNjGYlAr8r996uVSyJ6xfr8FJBERQgh+nntNWhsVEXeLrzQ6GiGb+FCtVLp6A/cNDvnqY2KRFSK9RU/IImKEEL0Exj2ufzy6Fjt01diIixdCtewlmn/2tq9WZGIKp98Al6vWnp+1llGR2McSVSEEKIHfbubr/1pHi7cfPWrRkczAg0NUFPDd871sBi1SVHnQxvB44GaGnW5iAqBYZ9TTlG9e7EqRqfmCCHEwJp+t5azD21lqW0dCxZkGh3O8M2aBcCxgI7aYdm2pxFcru7r6PrYxyWGTeanKNKjIoQQ/l4IPB7GP656Ib4Rv5HJ70VhL8T69cHlIRp6ryPx8epyERVkfooiPSpCCOHvhQCY6O+FmNIWpb0QeXlqjXLP2AOqq8HpHPuYxLAdPqxyZJAeFelREUIIq/ZC+AvAdMpHfdT573o3/2ifx8UpbhwOo6MxlvSoCCGE1Xoh0tJg+nSYOZPaL17PnpLVHMsuJk1IY7LRsYmQdD6wlnls5WDyOjQtCudKhZEkKkII0UMnNuLoUr0R0boF8YwZUF8P48bhQONLj+dT924bf9qWyLLZRgcnBtXQoLbr1jQy3Gqu1Bf/txE816qhx9RUOO44g4Mce9IfKIQQQHtyGv/TplODi/rbV6relenTVe9ENEpMBE1D0+C66zXaSOSBB4wOSgxp1izIzASXi6S2RgAm7vfPlcrM7DWXKpZIoiKEEMAr78/gWL2ey1OrOfaXBWrIp75e9U5Euauvhrg4tdz1v/81OhoxqB5zpWxWmis1SqNKVHbs2MHtt9/OggULgufdd9997NixY7RxCSHEmHrqKWgjkUsu1dQcVE1TvRIW8JnPwKWXqtNr1hgaihhKXp5KkAdSXa0uj0EjTlRWrVrF/PnzycjIwN1jH4n09HSKiorCEpwQQoyVp55Sx8suMzaOSLnuOnV88EHZqDAaBFdqRdPW3REy4megpKSEmpoabrjhhl7nL1y4sFfiIoQQZrdrF/z73+o74eKLjY4mMr70JTUX85NP4JlnjI5GDCotjcY4NVfqn9+ywFypMBhxorJnzx6mTp3a7/y6ujr0aCmMJIQQwNNPq+NZZ0FKirGxRMq4cWquCiCTak2sMXEGMzrrOZNqPvsza82VGqkRJyq5ubnk5ubS0tISPK+lpYWCggLy8/PDEpwQQowFqw/7BHzzm+r4xBNqFawwn1deUXOlZs/WmDoVS82VGqkRJyqlpaVMnjwZu91Oc3Mzc+fOJTk5mYyMDO65555wxiiEEBHT1gZbtqjTgQmnVvWFL6hVru3t8NBDRkcjBhLY3+e884yNw0xGVfCtvLwcr9fLG2+8AYDT6SQ9PT0sgQkhxFh47TXYvx+mTYPTTzc6msj75jfB7VbDP7feanQ0oq+XX1ZHSVS6jboyrcPhwDHKjQiKiorIyMgAICUlhZycnNGGJYQQIamsVMfs7NhYYHHVVfC978Gbb8I//wmnnmp0RCLg0CGVRIIkKj2NOFGx2WxomjbgZQ6Hg/fee++I9+Hz+Zg/fz5btmzBbrfj8XhwuVwyGVcIMWaefVYds7ONjWOspKTAFVfAI4/A2rVw331GRyQC3G41LDd9OsjgRLcRJyo1gf2ne9izZw+33347N954Y0j3UVRUxOLFi7Hb7YAaOqoM/LwRQogIa2rq/gUbK4kKwDXXqETloYfgnnuCxVCFwXoO+wzSDxCTRvz2nDNnzoDnb968mZtuuolly5Yd8T7Kysqora3F6/Xi9XrJysoiKytrpCEJIcSwPPec2nfwc5+Dz37W6GjGziWXdNdUqapSfwvjyfyUgYU9j3Y4HCEVfPN6vQB4PJ7gPJeCggJyc3MHTVZaW1tpbW0N/h1YGt3e3k57e3sYou8WuL9w369ZWL19IG20iki28ZlnbEAcWVmdtLcbs1OyEa+hpsHixTb+9Kc41qzpYv78zog+nrxPj6yrC155JR7QOPPMDtrbzTUFItyv4XDuR9NHOCFk+fLlA57v8Xhwu93s2bNnyNtXVVWRnZ1NZWVlMDHx+Xykp6fT3Nw84G3uuusu7r777n7nb9iwgYkTJw6zBUKIWKbrUFCQxaefTuKOO14lM/NTo0MaUzt32vnBDy5k3LhO1qz5BxMnSl19IzU0TObWW+cxfnwHDz30FHFx5kpUwu3gwYMsWbKEvXv3kpSUNOR1R5yoXDxInelAz8hgQ0MBgUSlubk5OEcFQNO0XslLTwP1qMycOZPdu3cfsaHD1d7eTmVlJdnZ2SQkJIT1vs3A6u0DaaNVRKqNO3fC5z6XQEKCzqefdjBpUtjueliMeg11HZacuINvNRThW/5/XHG3K2KPJe/TIysrs3HzzXHMm9fFP/4R2R6ukQj3a9jS0kJqampIicqIh36eDUyVH6HBljTb7fbgsFBfiYmJJA5QoS8hISFib/5I3rcZWL19IG20inC38fnn1fHcczXsduOfOyNew9s/u47Mhq1UPPgwCf93VsQfT96ng3vtNXU8/3wbCQnmXScfrtdwOPdh2LMRmJfSNynx+XxkZmYaFJUQIlYEfmtZdRPCQTU0QE0NeDzMeWcTAOd/tJEP/+ZR5zc0GBxgbApMpD33XGPjMKOQe1QGm5MymBUrVhzxOsXFxWzatAmn0wlARUUFWVlZwb+FECI
SOju7e1RibqHhrFnBk3H+NbDTaMT25R5DP1LLakx9+KHad9BmUxtjit5CTlQGqpsymMEKwfWVk5NDU1MTJSUlgKrDInVUhBCR9uab4PNBUhIcYTqd9axfD0uXQkdHMCGxoY56fDzamjXGxRajAvv7nH46TJ5saCimFHKiMto5KYORnZaFEGNt61Z1vOCCGCx2lpcHs2eDq//k2X+WVXNanvRojzUZ9hmaeWfsCCFEhAQSlYsuMjQM4/k3N+ryfxU8+aSRwcQuKfQ2tFH9ltixYwdVVVX9zrfb7SFVphVCiLHW0QEvvqhOf/GLxsZimLQ0taHMzJlw/fXs/81qDryziw1Vadx2GMaPNzrA2LFvnxqKBOlRGcyIE5VHHnmE3NxcHA4HdXV1wbopHo+H7OxsSVSEEKbk8agvB7sdTjvN6GgMMmOGmr05bhxoGkcty8c5q43aDxJ58kmQDezHzmuvqaq06emxtY3DcIx46Of222+nqqqKnTt3MmfOHNxuN263m82bNw9aI0WIUXO7Yd687p3khBimwLDPhRdCXJyxsRgqMTG4850tTmPR1apG1dq1RgYVe2R+ypGNOFGpra1l3rx5gKqJstX/vz8nJ4fNmzeHJzoh+lq7Vn3TrFtndCQiGrndLCiZhwt37A77DOKaa9Tx6afh09jaTcBQgRU/Mj9lcCNOVJxOJzt27AAgKyuLe+65B4BVq1bh8/nCEZsQfPwx/PZ7DRRl13DjGR6a/qwKVB1as5GmKilQJYanc81aTm/aytWsk0Slj5NPhjPOUHN4Hn7Y6GhiQ3t7d0VaSVQGN+I5KsuXL2f79u2cfvrp5OfnU1paSlxcHLquU1RUFM4YRQx66y1YsQI2bYK29lnB87tQXdWJLY1MyJYCVSIEDQ2wezdoGp0bNhEHLNE2MrXtWqjRITUVjjvO6ChN4Zpr4PXXVcflrbcaHY31vfEGHDgAyclqxbgY2IgTlYULF/b6u6amhrq6OlJSUpgyZcqoAxMxyO2GwkL+Ma+EK3+ZyeHD6uyfnbieH+9cSlxXR7AwVeDYTjzfnriGuavg+uuDqy2F6NajEmuCP9FN1RvR5kqi29fixfC976kJx//+N5xyitERWdsLL6jj+efLZ9dQRvzU2Gw2rrrqKh577LHgeenp6ZKkiBHrelDNP3nnJ+s4fFjtwbJ9O/z0nTzitlcPeJtrT6pm1cE88vPVr8GO12Syrehj/fpgVTfNn+AGjsTHq8sFoDqXvvQldVom1UZeIFG58EJj4zC7EScqbrcbu93O9ddfT1xcHIsXL+a5554LZ2wiFvg3SNNrPOxbpeafXMVGfneth6d+XkPmtD7zTwI/O/zHtWvhvvvU981DD8E/lshkW9FHXh5UD5zoUl2tLhdBgUm169erPZFEZHR2wksvqdOSqAxtVJNpV65cSVNTE9u3b2fWrFnk5+cTFxfHt771rXDGKKxs1izIzETLdDG5tRGANBq55UEXcWdmdnfbBwpUuVywcqU6Tp9O/DFp3JbTwJaSGs5M8DC3TiU7+sMb4Y03mLJzp0y2FUGd/o88XfrZB3XZZZCSoiayb9lidDTW9eab0NKi9ps6/XSjozG3sPxvdTqdFBcXU1payvz58yktLQ3H3YpYsH49XXGqW942VLd8oEBVdTUUFKhjfb06f9YsLvh+Jq+1u5iGSnZobCThzDO56Ac/IOGEE8a2TcJ80tLYd9R0anCxyrUSzZ/okpZmdGSmk5gIX/+6Oi3DP5ETGPY577wYr+cTglEnKo8++iiLFy8mLi6ORYsW4XK5cMv8ABGiNz6Xx3nxIXbL9yhQhaapv6HXHIS+yU5XXBwdshusmDGDhc56zqSazmV9El3RT2D459FHVRVfEX4yPyV0I05UFi1aRFxcHMuWLSM5ORm3282ePXtYsWJFsJy+EENpa1MfiIdb1d96n/knIRtiDsKfr12NvmTJKKIUVnD4MLxYnQhoqn5Kz0RX9DN3Lpx0Ehw6BBUVRkdjPV2vu/n+31XhQUlUjmzEiUpKSgrPPvssTU1NrFy5UpITMWz33quWQHYkp9GVNl11x/eYfzKibvk+u8E+9NBsamvDGbWIRq++Cq2t8JnPwIknGh2N+Wlad6+KDP+EX9Nv13JBx1aui1+H02l0NOY34joqK1euDGccIsa8+y78/OfqdNEfZmDLqQ9ukEZ+vupuGc4v3j67wbJqNXv++T7vtx5Dfn4czz8vdQpiWWB/ny9+sXv0UAztG9+AO+6A559X89GlJt4o9Sg8OPFvatL/120bSfjXtaqOjxQeHNSIExUhRkrX1XzY1lZYsACWLAG0HknJSLrl++wGa8vPp+k/B9gzdzwfvmTj97+H7343jI0QUaVnoiJCc+yx6vl67jk1DezHPzY6oijXo/DgBH/hQXtbo+pBDpDCgwOS35hizD30kPqVNnEi/PnPYfyF22ey7ayTElm69D8ALF8O77wTpscRUeXgwe4pTJKoDM8114ALN/N+OQ99uyySGBUpPDhikqiIMdXWBj/9qTp9xx2Qnh7Zx1uwoJ7587s4fBiWLoWursg+njCfV15Rm7/NnAkOh9HRRJevfQ2ui1/L2Ye28sl9UkRxVKTw4IhJoiLG1AMPQF0dHH003HJL5B9P06C0tJPJk9UupbJSOfbI/JQR8FeMnvyeh7w4NZ9i0t82qk2AZMfyUQsUHpSJc6GRZ0mMmcOHuyfQ/vjHMGnS2DzuscfCnXeq07ffDj7f2DyuMIdAonLRRYaGEV38FaNxuUjyV4w+6pB/PkVmZq/5FmIY0tJoTlSFB5+5cpQrHGOIJCpizDz6IzdrP5zHl452k58/to/9ne+ouhCNjfCXb8nGhbFi/361sSXI/JRhGWA+hU3mU4xax/QZnDhOFR5M/bEUHgxVyImKzWYjLi4upH8nSMly0ceBA3Bw5VrmsZV7Pr9uzGttjRsHv/2t//RG2bgwVrz8MnR0qA4A6QQYBplPERE1NbB7XyJ2u6b295HCgyEJeXlyTU1Nr7+rqqrYvn07y5cvD56n6zr5+fnceOON4YtQRDd/7YC/bdK44pAa6/78vzaCZ4xrBzQ0cMm03Xz3Ao3cF1UcbNwI10oNAyuTZclhYLNBVxed2Iiji+ZmSDY6pij13HPqeNFFsr/PcIScqPStPJufn8+WLVtISkrqdf7mzZu56aabWLZsWXgiFNHN/zP2KqDLXztA221A7QB/HL/pEYf+aaOqhjuWcYgxJYnKKPQpovjf768m5eAunt6WxnXzjQ4uOgV2o54vz9+wjHiOSk1NDc3Nzf3Onzp1KlVVVaMKSljIALsjBxOCsRzrHmLjQhlzt6a9e1VXO0iiMiJ9dizf8n/VzKKe//eEzKcYicOHYds2dXrePGNjiTYjTlTmz59PTk4ODT2WqdXX15OVlcV8SRdFQF4eN55ugrFuGXOPOS+9pOrmHH+8zFUcsR5FFL++RENPSKSmBt580+C4otCrr6pkZfp0mD3b6Giiy4gTlYqKCux2O+np6UydOpWpU6eSkZGBrutUyHabwm/HDnD7f9Xqmk
lqB/gfP1DLoLPTyGBEpMiwT3hNmwZf/ao6vXq1oaFEpcD8lHnzpJ7PcI34G2PKlClUVlZSW1tLWVkZZWVluN1u3G53v3krInb95jfwKWn4xk9HywzD7sijERhzd7k48OuVvBnn4mOm8/grUsPAiiRRCb/rr1fHdevg0CFjY4k2Mj9l5Ea1KeGOHTvYuHEjb7zxBs888wwA9913H1lZWZx++unhiE9EsU8/hYcfhnZmsLOqnsxzRrE7cjj02Lhwkqbx7OF87vxRG8f+MZGvfDs4hUVYQFOT6s0DKfQWTllZqoDi++/DY4/5NxQVR9Tc3D3ynJVlbCzRaMQ9KqtWrWL+/PlkZGTg7lE4Kz09naKiorAEJ6LbunVqj5UzzoDMc3tvGGhY7YAeY+43f0dj8tREdu6EzZuNCUdExosvqjnbJ50En/mM0dFYR1wcXHedOn3//cbGEk22bFHzpU4+WSV6YnhGnKiUlJRQU1PDDTfcgN5jWefChQt7JS4iNul69zh2oLvYbI46Cr73PXX6l7+UDQutRIZ9Iueb31S5/tatsHOn0dFEh2efVccFC4yNI1qNOFHZs2cPU6dOBUDrMTOorq6uV+IiYtOrr8Lbb8PEiXDVVUZHM7ibb4YpU+Ctt1RXtrAGSVQi59hju79w//IXY2OJBroO/pkRkqiM0IgTldzcXHJzc2lpaQme19LSQkFBAfljvZGLMJ1Ab8qiRWDmudVTpnTv4vyLX0jNNyvYvRv+9S91WuanREagnueaNWqLAjG4d95Rc3oSE+HCC42OJjqNOFEpLS1l8uTJ2O12mpubmTt3LsnJyWRkZHDPPfeEM0YRZfbtg03+KvVmHfbp6dZb1TDQjh3w978bHY0YrRdeUMfPf142pY2UK65Qy5U//hieftroaMwt0Jty/vmqh1kM36gKWpSXl1NbW0t5eTm33347O3fu5M9//nO4YhNRatMmtQnhSSfBuecaHc2RTZ0K3/qWOl1cbGwsYvRk2Cfyxo2Da65Rp2VS7dACicrFFxsbRzQbcaISGPJJT09n4cKFLFy4kPT0dOrr66mvrw9XfCIK9ZxEGy2FjW69FRIS1G67r75qdDRiNCRRGRuB3tK//x0++sjYWMyqtRWef16dlvkpIzfiRCU5eeD9M2traykoKBhxQCK67dwJr72mljFefbXR0YTumGPgG99Qp++919hYxMj9739qYrSmyXyASJs9W/WYdnbCgw8aHY05vfyyKoz3mc/AF75gdDTRa8SJymArezIzM2V5cgx7+GF1nD9fFYGNJj/4gTo+/ji8+66hoYgRenO1my3M46rj3fgXJYoICkyqXb1alvcPpOewT7T0LpvRsGtxHn/88WiahqZpnHDCCf0u93q9OJ3OkO6rqqqK0tJSsrOzcTgcVFZWMnfuXHJycoYbljABXYcNG9TpaKxY+bnPweWXw8dPuun6YiH8tQQyM40OSwxD/MNrmcdW4ietA+S1i7TcXLVqrrZWFdmTVVa9BSbnX3KJsXFEu2EnKqWlpei6zsUXXzzg6h6Hw8GcOXNCui+fz0dVVRUVFRU4HA6KiookSYlib74J//2vWoZ35ZVGRzMyP/wh7HhyLSd/tJWDpeuYKImK+TU0qDXJmsapb6vlZmfWbQTPtSp7Tk2F444zOEhrmjRJ/Shxl7pJXVwIf5fkPsDrVcOQ8fGSqIzWsBOV+f4dlXJycli4cOGoA6irq8Nut4/6foTxAr0pl19u7topA/J/2Z0/SeOU+E3QAV0Pb4Sb5MvO9GbNCp5MQfWvj2tpVJtfBkiBnIjJz4fPla7llE8lue/pySfV8fzzQb7iRmfEc1QKCgp49NFH+52/fPlydgR2AxMxo6ure35KNA77MGsWZGaiZbpI7mgEYOIB/5ddZmavL0NhMuvXB3eUtKESEi2QmMTHq8tF+DU0QE0NTjx8I171ZOkPbwSPB2pq1OUx7G9/U8fLLzc2DisY8X6xt99+O8UDFJ3IzMykqKgouJvykWzevJmUlBSampqora0d8D4DWltbaW1tDf4dWCLd3t5Oe3v7MFswtMD9hft+zSLc7dtxfw0PfvAjfjapmOzs0zHD0zacNmpr1hC3bBlaRwea/8su8KWnx8fTef/96GZoVB9Wf59CCG1ctAhOOIGEM8/sf9tt22DOHEzxhhxEtL6GCT2S92R/T9aEA717strb2tQxSts4HD3b2NICL7wQD2hcckm7md9+IQv3azic+9H0EW7MY7PZ8Pl8JPXp49+7dy8pKSl0dnYe8T68Xi+g5rUAlJWVUVlZSXl5+YDXv+uuu7j77rv7nb9hwwYmSsk/Y93yGF95/0EenbGUuD9+1ehoRmRKbS0X3XZbv/O33vcrWo7PMCAiEarAa9eJjTi60DUNTdd5/le/Ym+GvHaRMOOFF5jz+99jG+CzvisujjduuYUPYnSNeMMjPk5Z9yD3pt7Fjfc3Gx2OKR08eJAlS5awd+/efnlEXyNOVI4//ngeeeQRTjvttF7n19XVkZ2dzc4RbKvp8/lITk6mubl5wHkrA/WozJw5k927dx+xocPV3t5OZWUl2dnZJCQkhPW+zSAs7WtogD176OrSaDn3Cqbpn9I6ZRq2Z59UcwKmTjV0Xsew2/jGGySceSa6zYbW1RX80vOsquYL14Y2QXysWf19CqG18cPqD0g8/xw+YAaz7/smEx/+C9oHH9Dx6qswY8YYRzw8Uf0a+v/P9NVeXa16sgJ/R3MbQ9SzjS87i7j4nT/y4unf4ezXf2V0aGER7tewpaWF1NTUkBKVEQ/95Ofns2zZMioqKjjO/2VUX1/PokWLyM3NDek+Kioqeq3yCSQngy1xTkxMJDExsd/5CQkJEXvzR/K+zWBU7euxPH1qcBLjbrSeH1wmmMQYchuPOQamT0ebOROuv56GH69mwp5d3P/EMfy/ZeZ+D1j9fQpDt/GF2nTyqef0ueOovk2D798EbW0kDPB5YVZR+Rr65wZhs0GP5P6Tj+OZeUb/tkRlG0PV0MCUnTuJS/sMc97dDMBZ9ZtI+NdSS03ID9drOJz7GHGiUlhYSG1tLenp6cEqtT6fj/z8fFasWHHE2/t8PnJzc6mtrQ0O/fh8PqB7KEiY3Pr1sHQpdHQMPIlxzRrDQhuRGTOgvl5tZKJptJyRz2xnG51PJfKjD0z/wzymPf88tJHIF+f5z9A0tU5eRFZamqrs6E/udxauJqllFw/8PY2ffsXo4MZWwgkncJH/dOCHW8JeWX0WDqPalLC0tJSmpibKysooKysb1qaEdrudwsLCXklJWVkZOTk5slw5WuTl0fVq9cCXVVdDXt7YxhMOiYnBEpKnz9E464JEOjtB9to0N9nfxyCB5L66GgoKqN1QzSzq+fXmGezfb3RwY6tjzRq64uIAWX0WbiPuUQmw2+0jrqeyfPlySkpKgn/v2bNn0Im0wpz+/W84FYJdvoEuYKu49VZVcbO0FO64AyZMMDoi0VddnZouFR8fHbt1W06PnqtLLtU47oRE3nsP1q7t3pU8FuhLlvDC7j188Qf9J+RTXQ0hVmwX/
YWcqNx0003k5uYyb57qW12+fPmQ1w9l+CfQqyKi119fTWMa0zmYMpOM/7tebfqxa5fqEraAL39ZDSs3NKg6MdddZ3REoq9Ab8rcuXDUUcbGEutsNvjOd1RZ/d//Hm68UZ0XKz76SL0BrfrDzSghv4W2b98enEMCUFNTM+g/j8cTiViFyeg6/OXZGcyinjfLVNcv1dWqK9giEzri4+Hb31anf/c7GWI2Ixn2MZelS1Vl6nfegWefNTqasfXif0/mY6bjtbtg5Uo1P2X6dMv8cDNKyD0qfXdEfjbW3oGinzfeUDnJxImJXHKp/0wLTmJctgzuugv++U81DBSjpSFMSdclUTGbyZNVz+Nvf6t6VWJlnxtdhyd3OFlDPWv+NI4Tlmhqf4G2Nst9Jo61GOqUE+EW2EHh0kvByvX2kpPhmmvU6d/9zthYRG/vvgsffqgWap1zjtHRiICbb1a/WZ5+Wm1UGgv+8x819KMljuNLl6sJ+Vb84WaEkBMVm81GXFxcSP9O6FFfQ1jXX/+qjtG6U/JwfOc76vjXv8b8FiamUlWljuedZ+1kOdpkZKj5XQC//rWxsYyVRx9VX6fZ2Xr0bcpqciEP/dTU1PT6u6qqiu3bt/eaVKvrOvn5+dx4443hi1CYkterVvzExcFllxkdTeR97nMwfz5s2aJWAP3f/xkdkYDuRCUry9g4RH8/+IFK7NeuhZ/+1OhoIi+QqHzta13IYEV4hZyozJnTu4R4fn4+W7Zs6Vf6dvPmzdx0000sW7YsPBEKU3riCXW84AI1NBILbr5ZJSqrVqkP3vHjjY4otnV0dM9PkUTFfM49F846C157Df70JxtnnWV0RJHz9tvw1lsa8fFdXH65zLgPtxGnfTU1NTQ3999saerUqVQFfuYIywokKoHu3Vhw+eWqAOfu3SDlfoxXUwN794LdLiUqzEjT4Ic/VKdLS20cOhRnbEARtGGDOp522qdIvdLwG3GiMn/+fHJycmjoMWBfX19PVlYW8+fPD0twwpyam9XqF4ArrjA2lrEUHw833aRO//GPxsYiuod95s1TQ5DCfL7yFTj+eGhu1tiyJfr3uRlIVxesW6dOX3TRB8YGY1EjTlQqKiqw2+2kp6czdepUpk6dSkZGBrquU1FREc4Yhck8/TR0dsLnP68mzcWSZcvUCpPO1920ZM6DPsv2xdiR+SnmFxcHt/kLtf71rxm0txsbTyS8/LKaYD95ss4ZZ3xsdDiWNOJEZcqUKVRWVlJbWxvc68ftduN2u4+4ZbOIboHVPl+JsU3HAKZNg8WL4RrWklSztfunlBhTBw7AK6+o05KomNu110J28nY2Nl5O5YqaI98gygQ+AhYu1ElMlCq0kTCqqck7duygtLSUsrIyFi5cyJw5c7jvvvvYsWNHmMITZtPWpnpUILbmpwDqZ1NNDT+c72ExmwDo2rARPB41YULWLY+Zl19W78Vjj1VDC8K8JkyA/zt5LfPYiu+PG+jsNDqi8Dl0CDZvVqfz8iRJiZQRJyqrVq1i/vz5ZGRk9Kpam56eTlFRUViCE+bzwguwbx8cfbTaWyWmzJoFmZl8YamLNBoB0Hb7t3HPzFSXizHRc9jHv9m1MBt/Yo/Hg/M9ldhf4ttEZbF1Evu//Q1aWlTCfP75stonUkacqJSUlFBTU8MNN9yA3mMDlIULF/Yrty+s48kn1fHyy2NrszFAbdMer1b0a4Ft3JFt3I0g81OigD+xx+VC27MbgGk0csmPrZPYB4Z98vJi8PNwDI34qd2zZw9Tp04FQOvxk6aurq5X4iKsJTDsEwtF3vrJy1ObLg6kulpdLiKusRECo8v+zdyFGfVM7P3fCTZ/Yt8VF/2J/f/+1/15ePXVxsZidSNOVHJzc8nNzaWlpSV4XktLCwUFBeTn54clOGEutbXw3nvqsyfmV6D7fz51SgXKMffcc+p46qlqCFKY1BCJ/dfTq+n6enQn9qtXq9WPZ54Js2cbHY21jfhTtrS0lMmTJ2O322lubmbu3LkkJyeTkZHBPffcE84YhUn84x/qeM45MGWKsbEYJi1NbdvucrH7FyupwaW2dd8v27iPFRn2iT66P7HXNXV8b2f3JNRo1NmpttKA7tpKInJCLqE/kPLycurq6vB4PAA4nU7S09PDEpgwn0A356WXGhuHoWbMgPp6GDeOVE3jutfyeebJNm56NJHfXmB0cNan61BZqU5LohIF/Im9/tnP8uYZZ3Dq66+z/50P+XR/Gj/5CSxcCAkJRgc5fE89Be+/DykpsGiR0dFY34h7VJ7z97+mp6ezcOFCFi5cKEmKhR0+3N3lHtOJCqht2/3zsr59s0YbiTzwAOzfb3BcMcDrVYtFEhLg/PONjkYckT+x73zlFRoWLKDzlVegrp62aTPYuRMeeMDoAEfA7Wbm0nm4cPPNb6rl1yKyRpyo5Ofn89hjj4UzFmFiL76oagYcc4yaGyCU7Gw44QS1RPGhh4yOxvoCwz5nnw1HHWVsLCJEPRJ7NI3JqYn8+Mfqz7vvVp8r0WTvH9ZyetNWrmYdN95odDSxYcSJSmFhIYWFhezbty+c8QiTCgz7XHKJ1K3oyWaDb31Lnf7jH9XQhIgcmZ9iDTfeqGqPfPRRlOyb1aMmTFyFqglzzbiNHN9inZowZjbiOSqapjFlyhRmzZpFVlYWDoej1+UrVqwYdXDCPHY95mYLhegnlgCZRodjKkuXwo9/DP/+N7z0Elwgc1Uiouv1Gr7z+I+oo4SsLHkPRrPERLjrLrjuOvjlL9X/oWnTjI5qCD1qvkxE/VKzt/mLPQa0tY1xULFjxD0qlZWVpKSk4HK5aG5upqamJvgvMLlWWENdHVzQoEpgn1cne9v0ZbfDN76hTkfFr8Mo1fy79VzQsZXrE9bFXlVkC7rmGjj9dNi7F376U6OjOYIeNWFsUuxxzI24R2VzNK8tE6FpaIDdu9lergX3tkl8bCPkX6vGOFJT4Thrbt0+XN/+NpSVwWOPwYcfwmc/a3REFtHQAJ98wpTaWiY/pT5zvq5tJP6f8h6MdnFx8LvfwYUXqv87N91k4vlveXm0Hz+bhLNc/S+rrganE0tuDW0Sw05UduzYgdvtZu7cuZx22mmRiEmYhb+7cxHQ5e/upLFPd6dMygDUB+z556uhn7IyNUlQhMGsWSQAFwG6/z04pW+Xu7wHo9YFF0BuLpSXw3e/C1u2mHcO3NNPw5dRRR7j6FIT1LpkI8KxMKyhn0WLFuF0OiksLGTOnDlcddVVkYpLmMH69eh9ujuDXwrS3dnPzTerY1mZDFeHTY/3oOyvZE0lJWrOytat8OijRkczsM5OuG9tGh8znU9nuGDlSpUsT5+uasWIiAo5Ubn33nvxer3U1tbS1NTEzp07cbvd/OpXv4pkfMJIeXlU/072tgnVlVfCZz4Dn3xi3g/cqJOXR8e2bQNfJu9BS5g1C374Q3Dh5ugl89j/vPk2tX3sMXipbgZz7PUc9Z9qKChQ77/6elUrRkRUyIlKWVkZ999/f7Com8PhYOXKlWzcuDFiwQnjBb4jugJvFdkidFAJCerz
C+BPfzI2FivqlPegZf3oR3DLlLWc17aV179jrgn7nZ3w85+r0wW3JDI5qbsmDImJxgUWQ0L+H+/1ejn99NN7nZeVlSUrfCzub9Wqu9OXId2docjPVyMSL78Mb75pdDTWoKdO43/a0dTg4l83y3vQUvz1SSa87eEqTU3Y//y/N+K53zz1SR58EP75T7W675ZbjI4mNoU8mXZKzO5CF7vq6+GF2hlk2Or5aPs4SNbUN3Fbm/ySGMRnPqP2L9m5yU3ipYXwRAlkSs2P0Xh73wzO0BvQxiXQVGyDCfIetIwe9UnG+WfRTqORo28wx2Tp/fsJVtH9yU9g6lTDQolpIfeoaGadii0iJlCN1nVOIvZk6e4M1be/DdewlpM/3srhVebqxo5Gzzxjo41ELrwIJk5E3oNW0qM+SSAhCUzc77QZP1n63nvVnDOHQ/2/FsYIuUelubmZqQOkk7quD3j+nj17RheZMJzsljxM/roz503U+FzcJuiErg0boUBqfozGM8+oJHnBAlmGbDl5eTB7du/l5n5zu6r5ZYoToz5+PvhAJSrQvTJJGCPkRKW4uDiScQiTaW2V3ZKHzd+NrQEp/pof4/dLzY/R2L8fXn45kKh0AXHGBiQiJ1CXpEd9kmuvVXO9PvOZsQ/nttvUhonnnQdf+9rYP77oFnKi8sMf/jCScQiTeeklOHBAzVnsM4daDGb9erVpSUdHsNaHrWfNjzVrDAstWj33HLS1aRx99AFOOGGc0eGISEhLUx80M2fC9dfD6tXo7+8iNSWNN95WpfafeWZsF3tt2gSbN6vqub/9rXmL0MUKWecnBiS7JY9AXp6qrTAQqfkxIn//uzq6XP+T96FVzZihZu5Xd9cn0Rrq+f2jM5g4Ue2YffvtYxfO7n+4+cw35uHCzY9/POColBhjkqiIAcn8lFHy//wL1P744AMjg4lOXV3wt7+p03PnfmJsMCKyEhO7fxH5J0uffDLcf7866957u09Hkq7DKzet5YKOrdw2bR133BH5xxRHJomK6KehAd5+W33XZmcbHU2UCXRju1TdmZ1JLj5mOmuekpofw+XxwMcfw1FH6ZxyikzOj0Vf/zrcdZc6fdNNai+giPDXc9lY6OHMelXPJbdzIwn/Mk89l1g24t2ThXUFelPOPhuSk42NJeoEurHHjQNN471j8ln45TYmbU7k+7/2L68VIXniCXXMztZJSJDN32LVT38K774L72xwE7+gkH+tKuEL3wxzbSL/RPiv070Ba3yzTIQ3C+lREf3IsM8o9ejGvvQyjc+mJ9LcbHhJiKgTGPa5/HJJUmKZpsHq1bD8s2u5sHMrLxWs46WXwvsY3p+tpx3ZgNWsTJWoZMs4g+FaW7u7VyVRGb24uO6y27/+tewKH6r334cdO9Tw46WXyi/ZmOUfkhn/locr29SQzML2jdx+sYfX/hSeIZn//AfO+VMeZyIT4c3KNIlKRUUFVVVVRocR87ZtU8uSjz5aliWHy/XXw5Qp8M478NRTRkcTHQK9Keeco+rkiRg1a5bagsLlwra7EYA0Gtl22MVZN2fCrFmjSv6feUa9x/73PzjheP+ZNtn80mxM8Ur4fD6ampqMDkPQe1my/D8Nj8mT1RZJAL/6lbGxRItAonLFFcbGIQw2QIn9QI2iduLJYz2XXKImXQ9HVxf8/vdw2WXQ0gLnnw9/fqT3RHjZ/NI8TPFVtHnzZhYtWmR0GAKZnxIp3/mO+rx9/nm1mkUMbt8+2LpVnf7yl42NRRhsiNpET99ZzWMT8qishOOPh6IiOOLOLW43zXPmcc3n3Nx6q0pYrr0WKish5dT+9Vyor1cT5IWhDF/1U1VVRVZWVkjXbW1tpbW1Nfh3S0sLAO3t7bS3t4c1rsD9hft+zWKg9u3aBf/5TwI2m85FF3UQ7U0302s4fTrk5MSxcaON++7r4sEHO8Nyv2ZqY7g8+aRGW1s8xx+v43B0WLKNPVm9fTDKNnZ0kADoNhtaV1fweOllHbzy1Xby8+PYvt1GSQm8+oft/L+jinhn2f9x4tddTJyoJuN+9JHGli0an1v5ILmfbGUu63hisos77+ziO9/pQtNQn3c2G3R0dD+2zUaoH4RWfx3D3b7h3I+m68auuaqoqCAnJwefz0dycjJDhXPXXXdx99139zt/w4YNTJR1n6P2zDPH8ec/n87JJ+/hnnteNjocy6mtncJtt11EXFwXpaWVpKYeNjokU7rvPhcvvzyDK698j2uvfcvocITBxu/ezYU/+AGHUlNpyM7muMpKJuzezQv33cfh1FR0HbZvP5qHHz6Z79b9lFv4A7/jFr7L7wA4lgZS2Y2OxtNcytF8im/cVF760Z1MPqqNtqQkDsnwzpg7ePAgS5YsYe/evSQlJQ15XUMTlbKyMvL9g/ehJCoD9ajMnDmT3bt3H7Ghw9Xe3k5lZSXZ2dkkJCSE9b7NYKD25eTE8cQTNu66q5Mf/Sj6l6eY8TXMzo7jhRdsfP/7ndxzz+ifYzO2cTQOHYJjjonnwAGNbds6mDtXt1wb+7J6+yAMbWxtDdYmQtehra17O+OGBtizB13X4NLLGedrpCkhjSvinqKrU+fV9rnBu9HR0NDRNQ2tx3dNe1vbaJto+dcx3O1raWkhNTU1pETFsKEfj8dDZubwivYkJiaSOMBe2wkJCRF7Y0Tyvs0g0L62tu7dki+/PI6EBOvsUmum1/AHP4AXXoDVq+O46644Jk8Oz/2aqY2j8fe/q1VnM2fC2WfH99rfxyptHIzV2wejaGPf24zrsUHlCSd0n/a/YVI6GtnW3uP7JT6+12ahmt57s9BwPu9Wfx3D1b7h3IdhiUpTUxMejye4JLm2thaAkpISHA4HOTk5RoUWk7Ztg/371QT3OXOMjsa6LrsMTjpJLVVevRq++12jIzKXRx5Rx4ULZTNMEaIeu5YHi7T1SUSYPXvg3QWrq8HpHKtIxQgZlqhkZWX1mkTr8XgoKyujsLDQqJBimixLHhs2G3zve3DjjfC738HNN3evvox1ra3dZfPld4oIWV7ekRORwFI7m00t9QkcRVQwxVdSRUUFK1asAKCoqEgKvxlAliWPnWuuUUXMpta72XP6PHC7jQ7JFNwr3TzWMo8FU92cfbbR0YioNFixtjSpkRLNTPFbLicnR4Z6DLRrF/z737Jb8liZMAG+9S2Y+rO1HP2frehr16ENc76WFR0qXUsWW9E+uw6bTZ4PMQyBRGTmTFUKevVq9cEWSET6bBZKfn7vCbnC1EyRqAhjuVe62UIh6z5XwtSp8gURUQ0NsHs3t56v0YHau6R93UbGLb1WjaunpsJxxxkc5BjyPx/tnRqn/Vc9H+e+vxE8PZ6PY44xOEhheqEkIj1Pa5okKVFEEhXBuI1rmcdWJk1aB0iiElH+7eRTUEslARJ8MbydvP/5SACmBp6PvX2ejzAsHRUxQBIRyzLFHBVhgIYGpuzcScfrbzC3Tv2Sdb67UU06qwnPrqRiAD32LgkulSSGt5Pv8XzYBlo6GmvPhxCiH+lRiVEJJ5zARf7Tif5fsvGx/Mt+rISyQiGW5OV
x8LjZTDx/iOfDoiXJhRChkR6VGNWxZg1dcaqom/ySNYh/ZUKn/7+h12tkMMZ5/nl17GKQFRtCiJgmnwgxSl+yhBdLSga+sLpa/fIXkdFnqaTX7uJjpvPbDbG5VPLhLWl8zHQ+PkaWjgoh+pOhnxjm86ky1J3YiEOKII2ZPisUWlz5nDK3jY6/JnLzu3DiiUYHOHY+/RQefmkGm6nnX8+Ng5Nk6agQojfpUYlh7veP52Om884k+SU75hITgzXiXZkaC65IpKsLfv5zg+MaY5s3Q2cnnDY3kRNP8tfMlxUbQogeJFGJYVveOZVZ1FP+g2ooKFBDPvX16he/GFN33qmOGzaofYBixUMPqaOMNAohBiOJSoxqb4d//nMabSRy6WXyS9ZoLhd8+ctq5O0XvzA6mrHx1lvw2msQFweLFxsdjRDCrCRRiVGvvqpx8GACqak6Ur3dHGKtV+X++9Xx8svViKMQQgxEEpUY9Y9/qF6U7GxdVoOahNPZ3aty111GRxNZra2wdq06fcMNxsYihDA3+YqKUU8/rV76Sy6RVT5m8rOfqePGjbBjh6GhRNTjj8OePWo61CWXGB2NEMLMJFGJQfX18J//aNhsXSxYINVnzeS002DJEnX6Rz8yNpZIWrVKHa+7Ts1REUKIwUiiEoP+/nd1nD27iZQUY2MR/f3sZ6o48NNPwwsvGB1N+NXWwpYtau72ddcZHY0QwuwkUYlBTz6pjpmZ/zM2EDGgjAxV88yFm0lfnoe+3W10SGG1erU6XnwxHHecsbEIIcxPEpUYs38/PPecOp2Z+YmxwYhB3XEHXBe/lsyWrXjvXmd0OGFz+HB3oiKTaIUQoZAS+jGmqkpVJ3c4dGbM2G90OKKvhgbYvZvPaBrXjNsEHTDlHxtpe+1axiXokJoa1d0QGzaosvnHHgtf+YrR0QghooEkKjEmMOxz2WVdgQruwkxmzQqenOR/gVI6G7Gd7eq+jh6dE6B1HX79a3X6O99R83CEEOJIZOgnhnR1dU+kveyy6Pyys7z164Pf4Jo/IbGhjnp8vLo8GrndNM+Zx/j/uJk0CZYtMzogIUS0kEQlhng88MkncNRRcMEFkqiYUl6e2nNpAPd8tTp6N8VZu5aUN7dyNeu4/nqw240OSAgRLaTzNYYEhn0WLIBx44yNRYTAZoOuLnTNhqZ38cgjcNmbqtZKVPDPt0HT6HhoE/HAVWykbd61UBP9822EEGNDEpUY8re/qePllxsbhziCtDS1+c3MmXD99WirV9P8r118cjiNG2+EbduIjm0Pesy3iUPNt0mjEe2r0T/fRggxdqLh406EwYcfqqEfTYNLLzU6GjGkGTNU+eDqaigogOpqDv6nnr1HzeC117o38zO9nvNt/PNsAkeieb6NEGJMSaISI556Sh3POAOOPtrYWEQIEhMJLsvSND7rSOQXv1B/FhXB/6KhVt8Q822ojuL5NkKIMSWJSowIzE+RYZ/o9e1vw5w54PPBbbcZHU1o3n9fHTsDHzVRMWYlhDAT+dSIAYcOqUJvAFdcYWwsYuTi46G0VHW0PPRQ91JzM7tvbRofM53aKS5YuRJcLjX/Ji3N6NCEEFFCEpUYsHUrHDyopj6ceqrR0YjRmDsXvvc9dXrZMtizx9h4hvLf/8Kf/jqDWdSzf0v3fBvq69WbUQghQiCJSgzoOewj1Wij3y9+ASefrGri/P6aNzjnJz9Bq6kxOqx+fvhDVWTw0q8k4nR1z7chMdHYwIQQUUUSFYvTdXj/UTdbmEfeSdbahTdWTZgADz4IcXEwvXI90/71L7SHHjI6rF6efFL9S0iA4mKjoxFCRDNJVCzun/+Ei/+3lnls5az3rLMLb0xraOCMuBp+v9TDYjYBoD+8Sa0/r6lRhdYMdPgwfPe76vT3vgcnnWRoOEKIKCcF36zKXxX0tTIt+GUWX7ERrr9WdbNMmWJwgGLE/IXUvgXo/kJqcXt2q4mqAQYWUvvVr6C2Fo45Bu64w7AwhBAWIYmKVfm/zAqALv+XGY2NwS+zBIDHHzcgMDFq69fD0qXQ0TFwIbU1awwLrb4efvlLdfq++2DyZMNCEUJYhAz9WNX69Wq3Xbp33w3+yo6Pp8PALzMxSkMUUnv258YVUuvogKuvVsvhL7wQrrrKkDCEEBYjiYpV5eWx/ubBq4LqS5aMbTwiInR/AbUu/3/lO++E114zIBC3m10nzuPQy24mT4YHHpAVZkKI8JBExcK2blVHXZOqoJbj37hQnzOHHTfdBK45NCVOZ1dbGldcATt3jm04H5esJb1uK1ezjv/3/yA9fWwfXwhhXfLNZVGffAKVb6qqoO2nSlVQy/FvXNj5yis0LFhA5yuvkPhRPdNdM9i9W2082dgY4RgaGqCmBt9zHhIeURO2lyZuJG+2OVYfCSGsQSbTWtRf/wofMIPczHpefn2c6ofPz4e2NlVwq73d6BDFaPV8HTWNSSnjePJJOOss1aMyf77aOiFieal/wrad7gnbSW2NaJnmWH0khLAG6VGxqMceU8crcnrvwitVQa1t+nT4xz/U8V//gm+d4ab13HngDn+xv6616+nQek/Y1npM2Gb9+rA/phAi9hjao+Lz+di8eTMAtbW1eL1eVq1ahd1uNzKsqOfzwZYt6vSVVxoaijDAySfDiy+qHpULGtaS2LAV3x/WYX8wM2yP0dkJ396Wx+v6bDy4+l+huhqczrA9nhAidhnao1JUVERWVhb5+fkUFxeTkpJCbm6ukSFZwuOPq6Wip5wCJ55odDRizDU0cEJLDa/9Pw9LbGruSNu6jTz/6/DMHTlwABYu9O/kHDjTJhO2hRCRYeinitfrpaKiIvh3RkYG7gh0UceaTeq7icWLjY1DGGTWLMjM5JgrXEzV1YzaVL2Ri25zQWZmcG5JyNxumKeGjz78EC64QM2BSkyEn/xBrT7CJRO2hRCRYWiiUllZSWFhYfDv7du3k5WVZWBE0W/PHjWBEmDRImNjEQZZv17NEaF7zkhgDkk78Xxv2noefbTHPNceiciA1q6FrVt547Z1fP7zakuh1FR47jn46s1q9RHV1VBQoI719WpVkhBChIFpVv1UVFTg8/koLy8f9Dqtra20trYG/25paQGgvb2d9jCvYgncX7jvN9IqKjQ6OuI57TSd9PSOQRf3RGv7hiNm27hoEZxwAglnntnv+penvsazjS5+uxDOOquLb36zi6u3r2HC1q10PvggXaedpq7Y0AB79rB/v8b4NRuZABzz4kYcXMvsk7r42f+zc+zc49T7y2ZTY40BNltYV5VZ/XW0evtA2mgF4W7fcO5H03Vj1w8GJtT6fD7sdjv5+fmDXveuu+7i7rvv7nf+hg0bmDhxYiTDjBp33nk2b76Zxje+8RY5Oe8ZHY4wyJTaWi667TZ0TUPT9eDx2RW/YfUbl+J5LIGk9mZ0NP7BJaTRSPO4qfz24j/T2aHxi390zxXrQsOGHjwG/FX2ihJCjNDBgwdZsmQJe/fuJSkpacjrGp6o9FRWVkZRURF1dXUDrvwZqEdl5syZ7N69+4gNHa729nYqKyvJzs
4mISEhrPcdKY2NcOyx8XR2arz9djsZGYNfNxrbN1wx3cYPPiD+7LPRZ8xAv+46tL/8Be2DD+h49VWYMYOEceOCVx0sEWknngQ66EuPj6fz/vvHbBsGq7+OVm8fSButINzta2lpITU1NaRExbChH5/Px4oVK1i+fHkwKcnKysLn81FVVUVOTk6/2yQmJpI4QB2QhISEiL0xInnf4fbEE2rZqMsFJ58cWszR1L6Risk2pqdDQwPaOH+xv5tugrY2EgL/f3rswBxITgLHTi2eR65Yg37ybBaX9F96rFVXE2/A0mOrv45Wbx9IG60gXO0bzn0YNpnW6/VSUlJCU1NT8DyfzwcgdVRGyF+SRibRCiVxiGJ/Q+zAHOeuZtFf87pXjcnSYyGEgQz75HE6nRQWFuJwOILnbdq0CafTKSt/RuDDD+H559VpSVTEsAyWiKTJ0mMhhPEMXfWzfPlySkpKgn/7fD62BEqqimHZsEEtNz3vvOGXyRAxKpCIzJwJ118Pq1fDrl3diYh/40PGDbBXlBBCjBFDExW73d6rjooYuXXr1PHqq42NQ0SRUBKRnqdlryghhAFMU0dFjNybb6oN6MaNA9mBQAyLJCJCCJOT2XEWEOhNueIKSE42NhYhhBAinCRRiXKd1W6++od5uHDLsI8QQgjLkUQlyn24Yi3ntW3lhvHruPRSo6MRQgghwkvmqESjhgbYvRs0jSnPqK2Sv65tZNy/r1VLf1JT4bjjDA5SCCGEGD1JVKJRj/XHk1EFvSYfalR1LgLMszOCEEIIMWIy9BON1q+HeJVjBsqea4E9WuLj1eVCCCGEBUiPSjTKy0M/eTZaZv99WKiuBgP2YRFCCCEiQXpUotRbb6ljJ7IPixBCCOuSb7co9eDTaXzMdOpTZB8WIYQQ1iVDP1Fo3z74899m8DvqqXx0HBkXyj4sQgghrEl6VKLQxo2wfz+kn5TI+ReoVT9S/lwIIYQVSaISZXQd/vQndXrZMpWfCCGEEFYliUqUefFFtQnhxIlw3XVGRyOEEEJEliQqUea3v1XHa66BlBRDQxFCCCEiThKVKOL1wl//qk7fcouxsQghhBBjQRKVKPLHP6o5KgsWwOzZRkcjhBBCRJ4kKlFi3z5YvVqd/u53DQ1FCCGEGDOSqESJ1auhpQVOOgkuvtjoaIQQQoixIYlKFDh8GO69V53+/velWr4QQojYIV95UWDNGvjoI5gxA6691uhohBBCiLEjiYrJdbzm5tTvzsOFm8JCKT4rhBAitkiiYnLv3LGWc1q3UjBhHcuWGR2NEEIIMbZkU0IzamiA3bvp6NQ4+vlNACyJ28iEt69V65NTU+G44wwOUgghhIg8SVTMaNYsQL04KajNfCYeaASXq/s6uj72cQkhhBBjTIZ+zGj9evR4lUPaUAmJFkhM4uNh/XqjIhNCCCHGlCQqZpSXx/3Lqge+rLoa8vLGNh4hhBDCIJKomNAnn6glyQC65n+JpHiKEEKIGCTffib0059Cw+E0didMV/NSVq5Ux+nTIS3N6PCEEEKIMSOTaU2mpkaVy+9iBu8+U885F40DTYP8fGhrk0IqQgghYookKibS3g7LlkFXF1x1FZzzxR5JiaZJkiKEECLmyNCPifzqV7BjB6SkwO9+Z3Q0QgghhPEkUTGJ996Du+5Sp3/zG5mKIoQQQoAkKqbQ2amGfFpb4eKL4eqrjY5ICCGEMAdJVIzkdsO8eZTlu3nxRZg0SS3w0TSjAxNCCCHMQRIVI61dC1u30vaXdQCUlUF6usExCSGEECYiq37Gmn/DQTSNzg2biAOuYiOHcq9lyUk6NMiGg0IIIUSAJCpjzb/hIIDNv+FgGo3cXu6Ccv8FsuGgEEIIAcjQz6C0mhrO+clP0GpqwnvHPTYc1AIbDiIbDgohhBADMbxHpaSkBIDa2loASktLjQwnSFu/nmn/+hedDz0EZ50Vtvttzcnje3+Yzf+rdvW/sLoanM6wPZYQQggR7QxNVIqKiiguLg7+XVBQQHZ2NpWVlcYE1GP+iG3zZgBsmzbBN7+phmNSRzd/pKkJFi2CJv/GyLpmQ9O71IaDXV3haIEQQghhKYYN/fh8PjweDz6fL3heQUEBVVVVeL1eY4KaNQsyM9UGgI271Xm7d6u/MzN7zS8JiX/5MW43//0vnHkmbNkC+8an0Zo8HS1TNhwUQgghhmJoj4rb7cbr9eL0D3c4HA6AXslLT62trbS2tgb/bmlpAaC9vZ329vZRx6OtWUPcsmVoHR3d80f8E1v1+Hg6778f3f84Wk0NtuXL6VqxAt01wDAOYFuzhritW/nP7Q9yrtvF3r0axx2n83DF0dhOfo/2cf4NB7/5ze4NB8PQjlAEnq9wPG9mJW20Bqu30ertA2mjFYS7fcO5H03XzbPEpKKigtzcXJqbm7Hb7f0uv+uuu7j77rv7nb9hwwYmTpwYlhim1NZy0W239Tv/vq+vZ9bXkkhIUEM0X1i1Csff/07t5Zfz72XLgteb8OmnjGtpAU3jjDt/zsT9Pv5HGpfyNI5ZzVx18y4Sjk8OS6xCCCFENDp48CBLlixh7969JCUlDXldUyUqLpeLgoIC8vPzB7x8oB6VmTNnsnv37iM2NGRvvEHCmWei22xoXV10YiOOLpzUoE1N4ZtXfMo558LpP7ocW2Mj+rRpdDz5pJrDMnUqCSecELyrLjRs6MFjQHtbW3hiHYX29nYqKyvJzs4mISHB6HAiQtpoDVZvo9XbB9JGKwh3+1paWkhNTQ0pUTF81U9AUVERixcvHjRJAUhMTCQxMbHf+QkJCeF7YxxzDEyfjv7Zz/LmGWdwymuvs/+9D4k7Ko3tn8yENcAalYQA0LibhDPPDN48j/WsYSkJdASTE1vP5cdr1pjqTRzW586kpI3WYPU2Wr19IG20gnC1bzj3YYo6KhUVFWRkZFBYWGh0KDBjBtTX0/nKKzQsWID+2isctbueV96fwWs3r6dDU7mdrU8NlHbiyWM9G8jjqlnVA993dTXk5Y1JM4QQQggrMDxRqaqqAgj2pPh8PuNW/QQkJnbvDKhpkJhIQgKc9Yc84t0DJyF//0k133o5jz174JFH/GfabL2PQgghhBgWQ79BPR4PHo8Hp9OJ1+vF6/VSVlZGSkqKkWGFpk8S8tWvwrnnQkoKapnx9Olq2bEsPxZCCCFGzLA5Kj6fj/nz5+Pz+SgqKup1mSmGgAYTSEJmzoTrr4fVq2HXrt5JiH/4iMDy4/z87uXHQgghhAiZYYmK3W6nubnZqIcfuVCTkJ5/+4ePhBBCCDE8pln1E1UkCRFCCCHGhMzyFEIIIYRpSaIihBBCCNOSREUIIYQQpiWJihBCCCFMSxIVIYQQQpiWJCpCCCGEMC1JVIQQQghhWpKoCCGEEMK0JFERQgghhGlJoiKEEEII04rqEvq6rgPQ0tIS9vtub2/n4MGDtLS0kJCQEPb7N5rV2wfSRquwehut3j6QNlpBuNsX+N4OfI8PJaoTlX379gEwc+ZMgyMRQgghxHDt27ePKVOmDHkdTQ8lnTGprq4uPvroIyZPnoyma
WG975aWFmbOnMmuXbtISkoK632bgdXbB9JGq7B6G63ePpA2WkG426frOvv27eOYY47BZht6FkpU96jYbDZmzJgR0cdISkqy5JsuwOrtA2mjVVi9jVZvH0gbrSCc7TtST0qATKYVQgghhGlJoiKEEEII05JEZRCJiYnceeedJCYmGh1KRFi9fSBttAqrt9Hq7QNpoxUY2b6onkwrhBBCCGuTHhUhhBBCmJYkKkIIIYQwLUlUhBBCCGFakqgIIYQQwrSiuuBbpJSVleHz+bDb7dTW1rJ8+XLsdrvRYYVNSUlJ8PSePXsoLi42MJrR8/l8bN68mfLyciorK/tdXlJSEnz9fD4fhYWFYxzh6B2pjUe63OxCeQ0BamtrASgtLR3T+MJhqDYGLgPVRq/Xy6pVq6Luc2c478Ps7GzLvVerqqooLS0lOzsbh8NBZWUlc+fOJScnx6Bohy+U17CoqIiMjAwAUlJSIt4+SVT6KCkpIT8/v9cX2w033EB5ebmxgYVJbm4u2dnZ5OfnAyopKyoqitpkxePx4Ha78fl8NDU19bs88AUXaG9VVRUFBQVR9UV3pDYe6XKzO1L8fd+fBQUFUfclF0obi4qKcDgcgGpjbm6updrYU0VFBVVVVWMUWfgcqY0+n4+qqioqKipwOBwUFRVFVZISSvvmz5/Pli1bsNvteDweXC5XSBsLjoouesnKygrpvGhUW1urA3pzc3PwvObm5n7nRaPy8nLd6XT2O99ut/drW7S+7QdrY6iXm91A8Tc3N+tZWVm9XsOamhod0Gtra8c4wtEb7DXKysrSi4uLg38XFxfrdrt9LEMLmyO9D5ubm/XS0tKo/X+o64O3sby8POo/S3V98Pbl5+f3ep/quq5XVlZGPB6Zo9KH3W4nOzsbn88HgNfrDf7KiXZerxegV3dy4LTb7TYgosjyer3BIby+ovHXXKxyu93B9y4Q/P8Y+D9qBZWVlb2GJLdv305WVpaBEUXO5s2bWbRokdFhiBEoKysjJycHr9cb/Awdi/epDP30sWrVKlwuF8nJyRQWFpKRkRFVwwRD6fkB3/fLu+cXgVUM1ia73W6pLzkrs9vtNDc39zov8AFplR8QfVVUVODz+Swz3NxTVVWVZROwgM2bN5OSkkJTUxO1tbVRO6zeV+Dz1OPx4HA4cDgcwSHKSL+mkqj0YbfbKSoqorKykpKSErKysli0aFHUTWobiMPhICsri6qqquC4aSz2LAQ+RER0WrFiBaWlpZb4P9lTYBKjz+cjNzfXcu0D1UaHw2HZHwpOpxPoTqLLysrIzc21RNLZs0c+0M7i4mLS09P7/ZgINxn66SMwoa28vJza2lqamppwuVxGhxU2lZWVbN++nbKyMioqKkhJSQGs++t0IJKkRK+ioiIWL14cnBxtJXa7nfz8/OAQUHJysqW+0APDBlYW6GkIWLRoUbCHzCoyMzODpwO905H+wSuJSg+BOQ2BbiyHw0FNTQ12u52KigqDowuf4uJi8vPzycnJCf6n6vnms4rBkq/ArzoRXSoqKsjIyIjK5eVD8fl8FBUV9foyy8rKGpMvgLHi8Xgs+RnTV9/viUCvmBWG1gf7zLTb7RFvnyQqPXi93gG7WwsKCsY+mAjxeDy9/g4MA1mxm9nhcAz6n8jq4+RWE/jCDvSk+Hw+S3z4g/rcKSkp6dXTF0harPL/sqmpiaqqKkpKSigpKaGoqAhQ5QOs8iMwMGTX830ZeB2t8MMo0FvU9/+dz+eLeBIqiUoPWVlZeDyeft10NTU1lumyzM3N7fUrrbS01BKTvQYbzlm+fHmv9lZUVETtsMGRhqyifUhrsPg9Hg8ejwen04nX68Xr9VJWVhYctowmA7XR6XRSWFjY68ts06ZNOJ3OqEyoB2pjVlYWhYWFwX+BH3+FhYVR+dk6UBvtdnu/1zEw3BVtCedg/xeLi4vZtGlT8O+KigqysrKCc1YiRdP1SFdqiS4+n48VK1YwderU4PhbzwJw0a6qqgqPxxOsultQUBDV2b7X66WiooJNmzbh8XgoLCzsVwmypKQk2Mbt27dHXWJ2pDaG8hyY2VDx+3w+0tPTBxzjj6aPriO9Rj6fj7KysuD1A6tFoulzJ9T3YeA6FRUVFBYWkp2dHTUJ2XBfx2ir/B3Kaxio3A5j1z5JVIQQQghhWjL0I4QQQgjTkkRFCCGEEKYliYoQQgghTEsSFSGEEEKYliQqQgghhDAtSVSEEEIIYVqSqAghhBDCtCRREUJERFlZGcnJyUf8V1JSAoDL5bLUdhVCiPCQgm9CiIjpuS+I1+slOzub8vLyXiW3U1JSght/2u32qKlSKoQYG5KoCCHGhNfrJSMjg5qamojvDSKEsA4Z+hFCCCGEaUmiIoQwhezsbIqKioJ/5+bmUlJSQkFBAcnJyWRkZFBVVUVVVRUZGRlomkZubm6/++l5/Z4bxAkhopMkKkIIU/L5fBQVFZGbm0tdXR1Op5Pc3FxKS0upqamhpqaGioqKXslIbm4uXq+Xuro6KisrKSoqwuPxGNgKIcRoSaIihDAtp9NJVlYWdrudgoICfD4fBQUF2O12nE4nTqeT2tpaoHuL+vLycux2Ow6Hg+LiYjZt2mRwK4QQoxFvdABCCDGYzMzM4OmUlJR+5zkcDnw+H0Cw5yQ9PX3Q+xBCRB9JVIQQpmW320M6L8DpdFJTUxO5gIQQY06GfoQQluB0OvF4PMEeFiGENUiiIoSwBIfDQX5+fnBCLUBFRUWw8q0QIjpJoiKEsIzS0lKcTicul4vk5GRKS0ul0q0QUU4q0wohhBDCtKRHRQghhBCmJYmKEEIIIUxLEhUhhBBCmJYkKkIIIYQwLUlUhBBCCGFakqgIIYQQwrQkURFCCCGEaUmiIoQQQgjTkkRFCCGEEKYliYoQQgghTEsSFSGEEEKYliQqQgghhDCt/w/0pLpSQ3LnPAAAAABJRU5ErkJggg==\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAioAAAGyCAYAAADK7e8AAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABbtUlEQVR4nO3df3xbdb0/8NdJ23UbW5t23VZhY2068DFF2ZIOEAV3t3RwuSgqzYarypAtBe79qoA0FAU3rzgawd9fXTrAAQXXNni9yo+vJHOIIresyYZc5HpZ09XxS9a1acd+tGlzvn98ljRpkzZNk5yTk9fz8cjjJCfJ6fuTkybvfH5KsizLICIiIlIhndIBEBEREcXDRIWIiIhUi4kKERERqRYTFSIiIlItJipERESkWkxUiIiISLWYqBAREZFqMVEhIiIi1cpXOoCZCAaDePvttzF//nxIkqR0OERERJQAWZZx/PhxnH322dDpJq8zyepE5e2338bSpUuVDoOIiIiScOTIESxZsmTSx2R1ojJ//nwAoqBFRUUpPXYgEMBzzz2H9evXo6CgIKXHVgOtlw9gGbVC62XUevkAllELUl2+wcFBLF26NPw9PpmsTlRCzT1FRUVpSVTmzp2LoqIizb7ptFw+gGXUCq2XUevlA1hGLUhX+RLptsHOtERERKRaTFSIiIhItZioEBERkWoxUSEiIiLVyurOtNMhyzJGR0cxMjKS0OMDgQDy8/Nx+vRpjI6Opjm6zNN6+QCWUSu0XEYtdrokSjXNJyqyLMPv9+Po0aPT+pCTZRnl5eU4cuSIJieT03r5AJZRK7RexkSGZxLlMs0nKu+++y78fn94CHN+fn5CH3bBYBDvv/8+5s2bN+WsedlI6+UDWEat0GoZZVnGyZMn8Y9//IPJCtEkNJ2ojI6OYmBgAAsXLkRZWdm0nhsMBjE8PIzZs2dr6sMxROvlA1hGrdByGefMmYNgMIgTJ05gdHSUTUFEMWjrv36cQCAAWZZx1llnKR0KEVFMc+fOhU6nS7j/HFGu0XSiEqLFdm0i0obQ55MsywpHQhRDZyewdi0kj0exEHIiUSEiIqIkPPoosG8fpMcfVywEJioaYbPZIEkS3G630qGkjclkQn19fcb/bk1NDWw224yPo1T8sbjdblRVVUGSpGmXraamBna7PU2RqYeazhdRRvX0AB4P4PUCra0AAF1rK4q7usS+np6MhqPpzrS5pLm5GQaDAe3t7TCbzUqHkxaNjY3Q6/VKh5E0tcTv9/thsViwd+9eGI1G+P1+pUNSJbWcL6KMq6gYux7qOtHbizW33z62P4NNlaxR0QCv14vS0lLYbDa0tbUldYzQL2w1iBdLbW1tViRhao/f7XajtLQURqMRAOJ+GavpPZFOaj9fRBnX0gLkn6nHOJOQSGe2cn6+uD+DmKhogMPhgNlshtlsht/v13TzDxERpVldHdDREfOukRdfFPdnUE4mKrIMnDihnstMa9Da2tpgsVhgMBhgMBjgcDgmPGZ8Pwuv14uSkhIAgMViQU1NDXw+HyRJgiRJUc0BNpsNVVVVKCkpmdBmb7FYYLfbUV9fj5KSElRVVcHtdkf1gbBYLFHPcTqdMJlMkCQJVVVVcDqdUceLF0usviJ2uz38d0wm06RJms1mQ0lJSczHTlbGRF7L0MiN6cafzGubiHjHtdlssFgs4fjilXWq98SxY8cmjSvyvubm5oRinuw5mzdvxoIFC2AymdDc3Bw1km+y8wGk9v2WrvNFpFpn5i6SFZzDKCcTlZMngXnzJr8UFemwZIkeRUW6KR8708vJk8mXxe12w+/3h6uoa2troz6IE9He3o729nYYDAbIsgxZlsPNARaLBV6vFy6XC93d3ejr60NNTU34uX6/P/zl193dDaPRCIvFAofDAY/HA4/HA6fTGfXF09fXh127dkGWZTgcjvDfmCqW8err69Ha2or29nb09/ejqakpbn8Lt9sNp9OJ7u5uyLKMpqYmlJaWJlTGVL2W4yXz2ibSuXOy4zY1NUXFFyupTaQcdrs9blyhRKi7uxsulws2my18fieLOd5z1q9fj1deeQUulwt79+6NG3M8qXq/pet8EanSokVAeTlgMgE7d0JetQqn9Xpg4cKMh8LOtFku1OwTsnHjRtjtdjidTtTW1s7o2F6vF06nE/39/eEP7/b2dpSUlMDtdof/rtFoDF+vr6+H0+lEfX099Ho9jEYjjEYjurq6wse1Wq3h62azGQaDAW63O9xnIhF+vx/Nzc3o6uqCwWAIH2uyx/f19YXLEXpsomVMtWRf26kSqEyVJ15cPp8v6u/r9Xo0NTWhtbU17vmd7Dl6vR579+7F888/D6PRCJ1Oh8bGxgm1dJNJxfstXeeLSLWWLAEOHwZmzQIkCaM33ADXb36DK5csyXgoOZmozJ0LvP/+5I8JBoMYHBxEUVFR2qftnjs3+ec6nc6oX5hGoxF6vR4Oh2PGiUpnZycMBsOEX5jV1dVwuVzhD+Tq6urwfaFaish9BoNhQk1Hc3MzXC4XfD4ffD7ftGNzu93Q6/XhJGUqZrMZpaWlkCQJZrMZ9fX1qK2tTbiMqTaT1zYVx52peHGFaioqKyvjPn68yZ7j9Xqh1+tx4YUXhvcnes4jzfT9lq7zRaRqhYVj1yUJQYWWeMjJREWSgKlm1Q8GgdFR8Ti1Li8SauKx2WxRbemhDrV+v39GwysTHbYa629M9ndNJlN4lJLZbIbJZEouwGnQ6/Xo6uoKf2FZLBY0NTWl/e/GM5PXNhXHnanJ4jIajfBMcxbLeM+ZbjNmLKl4v6XrfBHR1FT6FUyJCNWa9Pf3R11CH/iTDVXu6+ub8vhmsxk+n2/Ch3RnZydWr16dVMw+ny/czj+TX/eh+T+m++vYarWivb0dDocDra2tKSljIq/leOl4bdN53EQZjUZ4vd5pJUyTPSdUG3f48OHwvqle78j7U/V+U/p1JcplTFSyVKjWJFZnvVC/kMgmIYPBEK5i9/l8aGxsjHqOwWAIfxC73W74fL5we/u6devC94VGFyXbrBSqDg91rnU6nRM6WsaKZTyDwQCr1RruhOn3++F0OuPOsup0OmG32+H3++H3++FyuWAwGJIq4/jXcvzfTCT+dLy2qT5uIuWI9ZzI8wKMvfbJPCf0Xt68eXM4llivd7zzkar3W7rOFxFNjYlKlmpra4PBYIj7K7G+vj7qV2p9fT06OzvDwyq3bt2KiojZB0NfCJWVlVFNIqFfoiaTCZWVlSgtLZ12tX4kvV6PhoaG8BDO0PEjq8zjxTJeqCNxTU0NSkpK4HA4sHHjxpiPNRgMcLlcqKysRElJCfx+P3bt2pVUGce/lvX19VH9JhKNP9WvbaqPm2g5xnM4HDAajTCZTOHzMlVtxmTPcblcKCkpwerVq2GxWCac48nORyrfb+k6X0Q0OUnO4iU7BwcHUVxcjIGBARQVFU24//Tp0+ju7kZlZSVmz549rWNnsjOtErRePoBl1IrxZfR6vTCZTJpZbfjkyZN4/fXXcf7552P+/PlKh5MWgUAAzzzzDK
666ioUKNQhM920XsZUl2+q7+9I2vxkIyIiIk1gokJERESqxUSFiIgoF3V2AmvXiq2KMVEhoqxiNBo10z+FSFGPPgrs2wc89pjSkUwqJyd8IyIiykk9PUBvr5j5tLVV7NuzB7j+erFCblkZsGyZsjGOw0SFiIgoV0RMS4HQKuNHj4rFB0NUVmPJph8iIqJc0dIC5J+powglJKFtfr64X2VYo0JERJQr6uqAFSuia1BCOjqAaawqnimsUSEiIspFoUkiVT5ZpLqjIyIiotRatAgoLxe1Kjt3im15udivQmz6ISIiyiVLlgCHDwOzZokOtVYrMDwMFBYqHVlMrFHJUna7HZIkoaSkBCUlJZAkCVVVVbDZbBOWop+M2+1GVVUVJEmKu/KwEkwmU8yVoeOpqamZdvxerxdSqNc7EVEuKSwcG/UjSapNUgDWqGQ1vV6P/v7+8O3QEvcmkwkejydqhdhYBgYGsHHjRuzduxdGo3FaCU66NTY2Thk/ERFpH2tUZkJl0w8bDAa0t7ejr68PbW1tUz7++eefR2lpKYxnenkrkRiEanTGq62thdlszng8k4kXKxERpY/iNSrNzc3w+/3Q6/Xo6urKrl/SkdMPV1crHQ0REZHmKFqjYrfbsWHDBjQ0NMBqtaKxsRFbt25VMqSp9fQAHg/g9UZPP+z1iv09PYqF5vP5YLFYUFpaCqvVGt5fX1+PkpISVFVVobm5GQBw5513YvPmzfD5fJAkKao/SKzHA4DFYkFzczOam5tRVVUFt9ud0HPsdnvU/aHnWSwW1NTUhGOQJCnc/DS+z4nT6YTJZAr3xXE6ndN+ffx+P2pqaiBJEkwmU1T8U/2NyWJNRWxERBSboomKy+WKqj3R6/Wq6icRU0WFqD0xmcS0w8DY9MPV1dHTE6eZ3+8Pf2mGviQNBgM8Hk/4MRaLBT6fD93d3XC5XLDZbPB6vbjvvvuwe/duGAwGyLIMh8Mx6eNDf8/hcKCpqQlNTU3hppmpnmOz2WCxWNDd3Q2j0RhOitrb29He3h6OQZbluLVpfX192LVrVzhWi8US/huJslgs6OvrQ1dXF/bu3Yv9+/cn/DcmizUVsRERUWyKJip6vR41NTXh5MTn88FgMCgZ0tRUNP2wXq8Pf2mGkpPIpjOfzwen04n29nbo9XoYDAY0NTWhNVQTNE4ij/f5fPB4PKitrU34OUajEWazGXq9HvX19fD5fNMuq9VqDfelMZvNMBgME2pEJuPz+eB2u8PJhl6vR2NjY0r+xkxjIyKi+BTto7Jr1y6YTCaUlJSgoaEBVVVV4V/2sQwNDWFoaCh8e3BwEAAQCAQQCAQmPD4QCECWZQSDQQSDwWnFFlpGPvT8sM9/HvjgB6FbvXrCc4IvvSSmH57m30pGKL5QbCtXrsS6devQ0NCAnTt3AgA6z3TyraysjHquyWQKPz/yGJM9PhgMQpZlrFu3DkVFRdN6Tug6MNZhN3R7/DayfONf++bmZrjdbnR3d8Pn80XdH+vxkeews7MTer0eFRUVk/7tyf5GvFinel46xX2faojWyxgq38jISMzPMS0IlUur5QO0X8ZUl286x1E0UdHr9bDZbHC5XLDb7TCbzdiwYUPc6v8dO3Zg+/btE/Y/99xzmDt37oT9+fn5KC8vx/vvv4/h4eGkYjx+/PiEfXknTmA+AFmSIMlyeHvixAmMnkme0u306dOQZTmcrAHA3XffjTVr1uCWW25BRUUFTp06hQsvvBDPP//8hOeHyhUMBsPHmOzxg4ODGB0dxTnnnBP1NxN5zpw5c8LPOXHiRPi+0PMjYwgZHR3F0NBQeP+aNWtQUlKCr371q1izZg3WrFmD06dPh+8f//jxZT116tSE12t8LFP9jXixTvW8TIj1PtUarZYx9Nn05z//GSMjIwpHk14ul0vpENJO62VMVflOnjyZ8GMVTVRsNhtqamrQ3t4e7ghqMpnQ1dUV8/GNjY247bbbwrcHBwexdOlSrF+/HkVFRRMef/r0aRw5cgTz5s3D7NmzpxWbLMs4fvw45s+fP3FSsMpKyIsXA0uXIvjlL0N6+GHIR47grMpKIEYc6TB79mxIkhRV7ssuuwzr1q3Dd77zHbS1teHjH/84Nm/ejGAwOCH5C/2K0+l04WNM9ngAyMvLQ2FhYdTfnO5zzjrrLAAI354zZ05UDLGe5/P58Morr2B0dDTq/tmzZ4efFyu2yHP44Q9/GAMDA+jt7Q03L0bGksjfiBVrIs9Lp0nfpxqh9TKeOnUKAHDppZdi3rx5CkeTHoFAAC6XCzU1NSgoKFA6nLTQehlTXb7p/JBTLFHx+Xzw+/3hDpmhTqAmkwlOpzPcByJSYWEhCmPMnldQUBDzhRsdHYUkSdDpdNBNc9GlUBVz6PlRzj1XjO6ZNUt8cN50EzA8DCmDM/uFPrDHx2a322EymXDw4EEYjUZYrVZs3LgRDocDBoMBTqcTPp8PX//618PPCR1j+fLlcR/f0NAQ7rQb+Ten+5zx2+XLl8Pn82FwcBCdnZ0wGAwwGAxRzysrKwMAPPjgg7BarXA6nfB6vdi4cWP4OLFiizyH1dXVMBqN4Qnu+vr6wp16E/0bsWJN5HnpNOn7VCO0XsbQ/3J+fr4mv+Aixfus1hKtlzFV5ZvOMRT7r/f5fDF/gU9n2nRFqXT64VDH1dDQXofDAaPRGO4L5HA4Jp1IbbqPT/Y5kfEajUZUVlaiqakp5mP0ej0aGhrCQ5xdLle4c+507N27F6WlpSgpKUF9fT3q6+vDtSuJ/I1YsaYqNiIiik2SI3tVZlio2SfyQ72+vn7SDrWRBgcHUVxcjIGBgbhNP93d3aisrJx200+oL0JRUZEmf8VpvXwAy6gVWi/jyZMn8frrr+P888/H/PnzlQ4nLQKBAJ555hlcddVVmq1t0HoZU12+qb6/IynaR6W9vR07duzAggULwnOoxPtVTURERLlH8VE/TEyIiIgoHu3VoxIREZFmMFEhIiIi1WKiQkRERKqVE4mKggObiIgmFfp80uJkdkSpoOlEpaCgAJIkhadKJyJSm5MnTyIYDCI/X9GxDaRFnZ3A2rVim8U0/Z+Rl5eH4uJiHD16FENDQygqKkJ+fn5Cv1yCwSCGh4dx+vRpTc7doPXyASyjVmi1jLIs4+TJkzh69CiOHz+OvLw8pUMirXn0UWDfPuCxx4DqaqWjSZqmExUAKC8vx5w5c/Dee+9Na20BWZZx6tQpzJkzR5NVslovH8AyaoXWy1hUVIQ33nhD6TBIK3p6gN5eMWN6a6vYt2cPcP31gCwDZWXAsmXKxjhNmk9UJEmCXq9HcXExRkdHE16dNBAI4IUXXsDll1+u2VkGtVw+gGXUCi2XsaCgILyWEVFKVFSMXQ8l9kePAibT2P4s67ep+UQlRJIk5OfnJ9wOnJeXh5GREcyePVtzH46A9ssHsIxaofUyMlGhlGppATZvBkZGxhKS0DY/H9i9W6nIkpYziQoREZHm1dUBK1ZE16CEdHQARmPmY5oh7fRMIyIiojGhzudZ3gk9u6MnIiKiaIsWAeXlolZl506xL
S8X+7MQm36IiIi0ZMkS4PBhYNYs0aHWagWGh4HCQqUjSwoTFSIiIq2JTEokKWuTFIBNP0RERKRiTFSIiIhItZioEBERkWoxUSEiIiLVYqJCREREqsVEhYiIiFSLiQoRERGpFhMVIiIiUi0mKkRERKRaTFSIiIhItZioEBERkWoxUSEiIiLVYqJCREREqsVEhYiIiFSLiQoRERGpFhMVIqJM6+wE1q4VWyKaFBMVIqJMe/RRYN8+4LHHlI6ESPXylQ6AiCgn9PQAvb2AJAGtrWLfnj1AXR2KDx0S9y9frmyMRCrERIWIKBMqKsauS5LYHj2KgosvxhoA+PrXAVnOfFxEKsemHyKiTGhpAfLP/DYMJSRntsG8PIzs3q1MXEQqx0SFiCjVYnWWrasDOjpiPvwFux3ypk0ZCo4ouzBRISJKtak6y+p00Vsiiov/JUREqdDTA3g8gNcb3VnW6xX7e3qARYuA8nLAZAJ27gRMJsiLF2OouHji8TiEmQgAO9MSEaVGnM6yMJnG9ssycPgwMGuWeIzVipETJ3B6796Jx4uslamuTmfkRKrGGhUiolSYpLMs8vPF/QBQWDiWyEiSuB2SSK0MUY5RtEbFYrFg48aNMBgM0Ov1UfcZDAZlgiIiSkZdHbBiRXQNSkhHB2A0Tn2MRGtliDo7gYYGwG7XfI2boomK1+uF0+mcsL+2thbt7e0KRERElAI6HRAMjm0T1dICbN4MjIzErpXhEGYKyaGmQUWbfurr6yHLctTF4XAwSSGi7BSjsyzKy8X+REwyhBkdHeJ+yl052jSoaI1KbW1t1G23243qSTLDoaEhDA0NhW8PDg4CAAKBAAKBQEpjCx0v1cdVC62XD2AZtSKryrh4MfDGG2OdZW+4ARgeFv1Q4sQ/oXwjIygAIOt0kILB8DYwMhL3GGqXVecwSZkoY0FE06AsSZAAyEePQopoGgwMD6flb6e6fNM5jiTL6mjw9Pl8cLvdsFqtcR+zbds2bN++fcL+J554AnPnzk1neEREGTG7txef/PrXcaqsDD01NVjmcmFOby/+cP/9OF1WpnR4pKAlf/gDVv34x9CNjk64L5iXhwNf+Qre/OQnFYhs+k6ePIlNmzZhYGAARUVFkz5WNYlKfX09HA7HpI+JVaOydOlS9Pb2TlnQ6QoEAnC5XKipqUFBQUFKj60GWi8fwDJqhdbLGLN8Q0NjtTKyPFYrk6W0fg6BDJbxwAEUXHzxxL/f0QGsWpW2P5vq8g0ODqKsrCyhREUV86h4vd6EHldYWIjCGP+sBQUFaXtjpPPYaqD18gEso1ZovYxR5RtfzlmzMh9QGmj9HAIZKGNoCPy4DtsF+fkT3zdpkKryTecYqphHxeFwoKqqSukwiIiI1G2mHbazkCpqVNxuN0yx5h4gIiKiMUuWTJjdONubBqeiikTF5/NxgjciIqJERCYl42c31iBVNP0YDAaUlpYqHQYR0dS4WCBRRqmiRqWrq0vpEIiIEpNDM4ISqYEqalSIiFRNjTOCsmaHcoQqalSIiFRNjYsFsmaHcgRrVIiIptLSMjZ/RazFAltaMhOHGmt2iNKMNSpERFOpqwNWrIiuQQnp6ACMxszEocaaHaI0Y40KEdF06HTR20xSS80OUQYxUSEiSoQaZgStqxM1OLF0dIj7iTSGTT9ERIlQ24yg49Z6IdIq1qgQESWqsHCsb4hSM4KqoWaHKINYo0JElE3UVrNDlGZMVIiIsk2OrfVCuY1NP0RERKRaTFSIiIhItZioEBERkWoxUSEiIiLVYqJCREREqsVEhYiIiFSLiQoRERGpFhMVIiIiUi0mKkRERKRaTFSIiIhItZioEBFpVWcnsHat2BJlKSYqRERa9eijwL59wGOPKR0JZSFZBl57DXjjDWXjYKJCRKQlPT2AxwN4vUBrq9i3Z4+47fGI+4niOHFCvG1uuEEs1H3BBcADDygbE1dPJiKK1NkJNDQAdjtQXa10NNNXUTF2XZLE9uhRwGQa2y/LGQ2J1E2WAbcbeOQR4Ne/FslKyOzZwMiIYqEBYI0KEVG0bG8uaWkB8s/8Bg0lJKFtfr64nwgiIdm5E/jwh4H164HHHxf7DAbg618HXC6gvx948EFl42SNChFRTw/Q2ytqICKbS66/XnzJl5UBy5YpG2Oi6uqAFSuia1BCOjoAozHzMZGqnDwJ/PznQFOTqGwDgPnzgS9+UVwuvnisMk4NmKgQEWm1uUSnA4LBsS3ltNFRYNcuYPt24N13xb7KSuCrXxV9UoqKlI0vHjb9EBFprblk0SKgvFwkWjt3im15udhP2SHFQ8v/8AdRmXbzzSJJWbYMeOgh4H//VyQqak1SANaoEBFpr7lkyRLg8GFg1ixRQ2S1AsPDQGGh0pFRoiL7Ss2gU3dvL3DrrWO5tl4valRuukm8PbIBExUiokhaaS6JTEokiUlKNkhhXylZFk/9ylfEIXU6oL4e+Pa3xWGyCRMVIiJgrLlk6VLgxhtFvfiRI2wuocxJUV+pY8dEUvLkk+L2BReIt/NFF6Uu1EyaUR+VgwcP4s4778QVV1wR3nf//ffj4MGDM42LiCizQs0lHR3iU76jQ9xeskTpyChXpKCv1HPPAR/5iEhS8vNFM4/Hk71JCjCDRGXXrl1Yt24dqqqq0BnR2aeyshI2my0lwRERZVRh4dgvWTaXUKbV1YkEOZaODnF/HKOjwDe/CVxxBfDOO8AHPyiecs892dMXJZ6kExW73Q6Px4OtW7dG7b/22mujEhciIiKaJp0uejuJ3l7gn/8ZuPdecfumm8SKCdnWBzyepBOVY8eOYcGCBRP2d3d3Q87G+QaIiIiUNs2h5fv3i4e4XMDcuaJ16Oc/F9e1IunOtBaLBRaLBW1tbeF9g4ODqK+vh9VqTUlwREREOSXBoeWyDDQ3i1E9w8PA8uXAr34l+qdoTdI1Kg6HA/Pnz4der0d/fz9Wr16NkpISVFVV4b777ktljERERLljir5Sw8PAli2iiWd4GLjmGjEvnBaTFGCGw5Pb29vh8/lw4MABAIDRaERlZWVKAiMiIqJofX3A5z4nZprV6US/lIaGhLqyZK0Zz6NiMBhgMBhmdAybzYaqqioAQGlpKWpra2caFhERkaYcOgRcdRXwxhtiEcG2NuDKK5WOKv2STlR0Oh2kOMsrGgwGvPHGG1Mew+/3Y926ddi7dy/0ej28Xi9MJhM74xIREUX44x+Bz3xG1Kicey7w1FPabeoZL+lExePxTNh37Ngx3HnnnbjpppsSOobNZsPGjRuh1+sBiKYjl8uVbEhERESa09IiJkseHgZWrwZ+8xsxEChXJJ2orFq1Kub+trY23HzzzdiyZcuUx2hubkZXVxd8Ph98Ph/MZjPMZnOyIREREWmGLIs+KHffLW5fe61Yq1BLQ48TkfK1fgwGQ0ITvvl8PgCA1+sN93Opr6+HxWKJm6wMDQ1haGgofHtwcBAAEAgEEAgEUhD9mNDxUn1ctdB6+QCWUSu0Xkatlw9gGZMxOgp87Ws6OBx5AIDbbx/FvfcGodMBSryMqS7fdI4jyUl2
CGlsbIy53+v1orOzE8eOHZv0+W63GzU1NXC5XOHExO/3o7KyEv39/TGfs23bNmzfvn3C/ieeeAJzcy3FJCIiTRoe1uEHPzDhpZfOhiTJ2Lr1VVx1VbfSYaXUyZMnsWnTJgwMDKCoqGjSxyadqKxfvz7m/lDNSLymoZBQotLf3x/uowIAkiRFJS+RYtWoLF26FL29vVMWdLoCgQBcLhdqampQUFCQ0mOrgdbLB7CMWqH1MipZPsnjga6xEcEdOyBHrtCbYlo/h0DqyjgwAFx7bR5eeEGHWbNk7N49itpa5QeYpPocDg4OoqysLKFEJemmn+eeey7ZpwJA3CHNer0+3Cw0XmFhIQpjLBJWUFCQtjd/Oo+tBlovH8AyaoXWy6hI+Z54Anj+eeh++UvgkkvS/ue0fg6BmZXx7bfFmj1/+QtQVAT8+tcS/umfUt5DY0ZSdQ6ncwzFpogJ9UsZn5T4/X5UV1crFBURkcb19AAej1i1rrVV7NuzR9z2eMT9lHH/+7/ApZeKJKW8XEzo9k//pHRU6pBwqhavT0o8O3bsmPIxTU1NaG1thfHMEo9OpxNmszl8m4iIUqyiYux6aC6so0fFynYhnMsqo15+GfiXfxGrIC9fDjz3HMBJ3scknKjEmjclnngTwY1XW1uLvr4+2O12AGIeFs6jQkSURi0twObNwMjIWEIS2ubnA7t3KxVZTvrd78Sw4xMngOpq4Omn4y6UnLMSTlRm2iclHq60TESUQXV1wIoV0TUoIR0dAGu0M+axx4Avf1nkjOvXA08+Ccybp3RU6qPhZYyIiGhSoZXstLyinQrJMtDUBHzpSyJJ2bQJ+O1vmaTEM6PuxAcPHoTb7Z6wX6/XJzQzLRERKWDRItFjc+lSMTf7Qw8BR46wzSEDgkHg1luBH/9Y3P7610XSwlwxvqQTlSeffBIWiwUGgwHd3d3heVO8Xi9qamqYqBARqdWSJcDhw8CsWaJDrdUqFpKJMf0Dpc7p06IWpb1d3P7+90XSQpNLOlG588474Xa7sXbtWlRXV4enzXc6ndi7d2/KAiQiojSITEokiUlKmg0MiNWPn38eKCgQa/Zcd53SUWWHpCuburq6sHbtWgBiTpR9+/YBECN52traUhMd0XidncDatWJLNF18/5AC3noLuOwykaTMnw88+yyTlOlIOlExGo04ePAgAMBsNuO+++4DAOzatQt+vz8VsRFN9OijwL59ors80XTx/UMZ9vrrYiK3V18V3YJeeAFYt07pqLJL0k0/jY2N2L9/P1auXAmr1QqHw4G8vDzIsgybzZbKGCnX9fSImZAkKXomzeuvF93ny8qAZcuUjZHUi+8fUsiLLwKf/jTQ1wecfz7w//4fJ3JLRtKJyrXXXht12+PxoLu7G6WlpSguLp5xYERhnEmTZoLvH1LAI4+M9VG++GLgqadETkzTl3TTj06nw3XXXYf/+I//CO+rrKxkkkLJi9d/oKVFzJgJxJ5Js6UlczFS9uH7hzJodBSw2cTkv8PDwOc+B+zdyyRlJpJOVDo7O6HX63HjjTciLy8PGzduxO9///tUxka5Jl7/gbo6MWNmLB0d4v4Qdpak8abz/iGagVOn8mGx5OHMqjD4xjfEUOSzzlI2rmw3o860O3fuRF9fH/bv34+KigpYrVbk5eXhlltuSWWMpGXTXcl1qpk02VmSJsOZWClNenqAxsZP4KmndCgsBB5/HPjOd/hWS4UZzUwbYjQaYTQasX79ejQ1NcHhcOBnP/tZKg5NWpdo/4HJZtKM11myrg7Fhw6J+5cvz1iRSIU4Eyul0b59wHXX5eO994qxeLGMX/9awiWXKB2Vdsw4UfnVr36F1tZWOJ1O6PV6WK1WNDU1pSI2ygWJruQ62Uyakat1RyQ7BRdfjDWAmKOanSVzG2dipTQYHQXuvRfYvh0IBiVUVAzA7Z6LqqoCpUPTlKQrpTZs2IC8vDxs2bIFJSUl6OzsxLFjx7Bjx47wdPpEU5pO/4HIpCRyJs1JOksG8/IwwmXrCYj//iFKwj/+AVx5JfCtb4n1ezZvDqKp6Y8499wEnsy+dNOSdKJSWlqK5557Dn19fdi5cyeTE5q5ZPsPTJLsvGC3Q960aYaBERGNef55YOVKwO0G5s4VQ5Gbm0dRWDia2AHYl25akm762blzZyrjoFyWyv4DOp34eRPaEhGlyMgI8N3vhpp6gA99SIzq+dCHgEBgiidz4sGkpaQzLdGMpKL/QIxkR/773zHEeX2IKAVef13kFPv3i9ubNwM//ek0hh5z4sGkceAUqcNM+w+Ekp2ODqC+HujowMihQzjNWZaIaAYCAeB73wNWrRJJSnGxaLH5xS+mOT8KJx5MGhMV0g52liRKvRzu+Pnyy8Dq1UBDAzA0BFxxBfDf/w184QtJHIwTDyaNiQoREcWXgx0/33sPuOkm4JJLgFdeAUpLRde5Z58VlbczxokHp4V9VIiIKFqOdvwcGhL9Tr79bWBwUOz74heBBx4AFi5MwR/gxINJYaJCmdPZKepQ7XaguppxEKlVjnX8DATEEONvf1vkDQBgNAI//CFw2WUp/EOceDApCdc76XQ65OXlJXQ577zz0hkzZSu1VCGrJQ4itcqRjp9DQ8CuXcCKFcDWrSJJOeccUdGxf3+Kk5QQ9qWbtoRrVDweT9Rtt9uN/fv3o7GxMbxPlmVYrVbcdNNNqYuQsptaqpDVEgdRNqirE9/ekTUoIR0dorohix07JhKUH/0IePddsW/hQqCxEbj5ZmD2bGXjo2gJJyrjZ561Wq3Yu3cvioqKova3tbXh5ptvxpYtW1ITIWU3tVQhqyUOomyjkUkUZVmM4vn5z8VvlKEhsf+cc4Dbbxc1KvPmKRsjxZZ0l2OPx4P+/v4J+xcsWAC32z2joEhD1FKFrJY4iLJFqOOnyQTs3Cm25eVZ1/GzuxvYsUNUEF1yieiLMjQkKoV+8QvA5wNuvZVJipol3Zl23bp1qK2thdPpxLIzVeaHDx+GxWLBunXrUhYgZTm1VCGrJQ6ibJGlHT+DQcDjAf7zP4Hf/AZ49dWx++bMAWprgVtuAS6+OHrhdVKvpBMVp9OJ2tpaVFZWoqSkBADg9/uxatUqOJ3OlAVIGqKWKmS1xEGkdpFJiYo7fr71FvCnPwG//z3w1FPA22+P3ZeXB1x+uRhmXFsLzJ+vXJyUnKQTleLiYrhcLnR3d8Pr9QIADAYDV1GmidQyd4Ba4iCipAUCwP/8D/DSS8Af/ygSlMOHox8zbx5w5ZXANdcAV10lJmyj7DWjeVQOHjyIPXv24MCBA/jd734HALj//vthNpuxcuXKVMRHWqCWKmS1xEFEUxodBd58UyQlf/mLuLz6KvDXv05cqVinA1auBD7xCeCf/xn4p3/iv7WWJJ2o7Nq1C3feeSfuu+8+7Nq1K7y/srISNpstnLgQAVBPFbJa4iDKcSMjwDvviGabt94SlZtdXWOXw4fF74hYiopEd7PLLhPJySWXsElHy5JOVOx2OzweDyoqKmCz2cL7r732Wlit1pQER0RE2UGWgZMnxaj/o0fFejm
h7bvv6nDw4Cr8/Od56O0VfUj+8Y+pu4gVFABVVcBHPxp9OfdcdoTNJUknKseOHcOCBQsAAFLEO6a7uxsy56MgIspqgYCYGO3oUTFXYugy2e3Tp+MdLQ/AuRP25ucDZ58tWmXPOQcwGERiErosWSI6w1JuSzpRsVgssFgsaGtrC+8bHBxEfX09a1SIiFRKloH+ftHUEmp2iXU5diy548+aJfqnL1okZntdtAgoKxvFsWP/g8su+yDKy/PxgQ+IJGThQi4gTFNLOlFxOBywWCzQ6/UAgNWrV8Pr9cJqteK+++5LVXxERJSEoSHR8fT114E33gD+93/F9o03AL8/sWPodMCCBWKFifGXhQtj7zvrrInNMoFAEM88cwhXXXU+CgpSXlTSuBmN+mlvb48anmw0GlFZWZmSwIiIKDEnTohFwQ8eBA4cENvXXhMdVuMpKxPNLaFml/GXD3wAKClh0wspL+lEZXBwEEVFRaisrIxKTg6fGdBeEbm2ChERpcyJE2IekeefF5eXX544ZBcQicYFFwDnnw8sXw6cd564LF8OzJ2b6aiJkpN0olJSUoLR0dEJ+7u6umC32zk8mYgohbq7gSefFFPDd3RMTEzOOQeorhbziaxaJS5Ll3J0DGW/pBOVeCN7qqur0dnZmXRARERJ6+wEGhoAu118a2e5v/1NJCdOp2jSibR0qZjYbM0acamoYFJC2jTtRGX58uWQJAmSJOG8886bcL/P54MxwQXe3G43HA4HampqYDAY4HK5sHr1atTW1k43LKLU0diXXU559FFg3z7gscey9tz19QGPPy5WeHjllbH9Op1ISK69Vsy+ysSEcsW0ExWHwwFZlrF+/fqYo3ums96P3++H2+2G0+mEwWCAzWZjkkLK08CXXU7p6RGTeEgS0Noq9u3ZA1x/vRiLW1YGnFnhXc0OHAB++lOgrU2M2AHEhGfr1onk5JprxKgaVWJyT2k07URl3bp1AIDa2lpce+21Mw6gu7s7PMSZSDEa+bLLSZEd90NVDEePijnWQ1Q6CaUsA089JeGb3/w4/vrXsXG7F14IbN0KfP7zWbKgHpN7SqOkp9qpr6/Hr371qwn7GxsbcfDgwZnERJR5FRXiA9ZkEl9ywNiXXXV19JchqUtLi5jiFBhLSELb/Hxxv8oEg6LvyapVwOc+l4+//rUM+fky6upER9kDB4B//VeVJyk9PYDHA3i90cm91yv29/QoGx9pRtKdae+88040NTVN2F9dXT2tRQnb2tpQWlqKvr4+dHV1xTxmyNDQEIZCdaIQQ6QBIBAIIBBrbN4MhI6X6uOqRarLJ3k80DU2IrhjB+TIX7IKmk4Zpd27kbdlC6SRkQlfdnJ+PkYffBCyCt8LWn+fAgmUccMG4LzzUHDxxROf++KLIhtQyesjy4DbLeGuu/Lwyiui9mfePBk1NYfwve8twbnnio/kyeY/UYuCiORdliRIAOSjRyFF/P8HzqwqyPdp9kt1+aZzHElOcmEenU4Hv9+PoqKiqP0DAwMoLS2NOXR5PJ/PB0D0awGA5uZmuFwutLe3x3z8tm3bsH379gn7n3jiCczlpACK+siuXTA8/TS6rr4a/71li9LhJKW4qwtrbr99wv7nH3gAA1VVCkREiQqdO1mSIMlyeKumc3f4cBEefvgC/OUvoqPJ3LkBXH21D1df3YWiouz7clvyhz9g1Y9/DF2Mz/pgXh4OfOUrePOTn1QgMsoGJ0+exKZNmzAwMDAhjxgv6URl+fLlePLJJ3HhhRdG7e/u7kZNTQ0OHTo07WP6/X6UlJSgv78/Zr+VWDUqS5cuRW9v75QFna5AIACXy4WamhoUaHDO55SUr6dHLAgiSci/+mpIR49CXrgQI089JX46LligaL+OaZfxwAEUXHwxZJ0OUjAY3gY6OsSvchXS+vsUSLCMb76J/I99DPKSJZC//GVIDz8M6c03MfLSS2LqVQX19QHbt+vgcOgQDEqYNUvGLbcE0dAQRFlZlp/DM/8z443/n8nqMiYosoyz/vIX1dUwz1Sqz+Hg4CDKysoSSlSSbvqxWq3YsmULnE4nlp35Mjp8+DA2bNgAi8WS0DGcTmfUKJ9QchJviHNhYSEKCwsn7C8oKEjbmz+dx1aDGZUvcnj6mU6MUm9v9AeXCjoxJlzGs88GysshLV0K3HgjpIceAo4cQcHZZ0PtC5Ro/X0KTFHGykqgpwfSrFnivXjzzcDwMApifF5kiiwDv/wl8JWvjC3wt2EDYLdLWLYsD2JF4TFZeQ5DfYN0OtHx5sy2ID8/5v9MVpZxmgoKCpD/xBPA889D98tfApdconRIKZWqczidYyTdmbahoSG8ts+CBQuwYMECVFVVobq6Gjt27Jjy+X6/HxaLJdz8E9oHjDUFkcplYSfGSS1ZAhw+LHoz1teL7eHDiv8ipwQVFo6N+pEkcVshb70FfPrTQF2dSFIuuAD4/e9Fn1NNDR5btAgoLxedznfuFNvycrE/1/T0oPjQIdETmp2LU2pGixI6HA40NTVh7969AKa3KKFer0dDQ0NUUtLc3Iza2loOV84WdXXAihXRw0BDOjqABCf+U5XILzeFv+wo+8gy8PDDwG23AYODwKxZwD33iClGNFmREEruQzVZViswPJyT/zcF552HNaEbWTZMXu1mlKgAIuFIdj6VxsZG2O328O1jx47F7UhLKjeu6pco1xw+LOY+cbvF7YsuEknLhz+saFjpx+QeADCyezd0N94oOhfHqmHevVux2LJdwonKzTffDIvFgrVr1wIQScZkEmn+CdWqUBYLVf2e6deBM/06crLql3LWk08CX/6yqEWZPRv4938Hbr0VyMub+rmkDfKmTXjh2LGYIweztoZZJRJOVPbv34+amprwbY/HE/exEhegyB2s+qUcNjwM3HEH8OMfi9uXXAI88ghw/vnKxkUKYw1zSiWcqIxfEfm5555LeTCUpVj1Szno8GEximf/fnH7jjuAe+/VaF8USshQcTHkxYshnXsua5hTaMZ9VIiIcs1//ieweTPg9wMlJaIW5VOfUjoqUtrpsjKMHDqEgrPOYg1zCiU8PFmn0yEvLy+hy3mR82sQEWnE8LAY0fOZz4gk5ZJLgIMHmaRQBBUNk9eKhGtUxvdJcbvd2L9/f1SnWlmWYbVacdNNN6UuQiIiFejpATZuFP0iAZGw7NghumcRUfoknKisGjeFuNVqxd69eydMfdvW1oabb74ZW7J0vRciovGeegr40peA/n5ArxcjTa+5RumoiHJD0jPTejwe9Pf3T9i/YMECuEMTCRARZbFAQHSS/dSnRJJy0UVi4lEmKUSZk3Sism7dOtTW1qInYlrgw4cPw2w2Y926dSkJjohIKUeOAGvWAPffL25/9avAH/8IVFQoGRVR7kk6UXE6ndDr9RPW+pFlGU6nM5UxEqlPZyewdq3YkuY8+6xY/PfPfwaKi4Ff/Qr44Q/ZH4VICUkPTy4uLobL5UJ3dze8Xi8AsZjg+L4sRJr06KPAvn3AY48B1dVKR0MpMjIC3H03cN994rbJBLS1AVwndWYkjweX3n
03pMWLNbeaMKXfjOZROXjwIPbs2YMDBw7gd7/7HQDg/vvvh9lsxsqVK1MRH5F69PQAvb1iyGHk6qjXXy/W9Cgr09jSuLnlrbeAz39eNO8AwL/9m2j24ejSmZNaWrDw1Vcx+vjjTFRo2pJu+tm1axfWrVuHqqqqqFlrKysrYbPZUhIckapUVIjaE5NJrIoKjK2OWl3NzgtZ7He/A1auFElKURHQ3g785CdMUmakpwfweACvF7q2NgCArrUV8HrF/oj+jUSTSTpRsdvt8Hg82Lp1K+SIpauvvfbaCdPtE2lCS4tYBRWIvTpqS4sycVHShoaA228HrrxSVJatWiW+Q2trlY5MAyIT+95esa+3l4k9TVvSicqxY8ewYMECANGLEHZ3d0clLkSaUVc3NtvXeB0d4n7KGq+/Llohvv99cftf/1V0nl2+XNm4NCMisZfOfCdITOwpCUknKhaLBRaLBYODg+F9g4ODqK+vh9VqTUlwRKql00VvKWvIMuBwiB/2Bw+KrkW//S3w058Cs2crHZ2GMLGnFEn6U9bhcGD+/PnQ6/Xo7+/H6tWrUVJSgqqqKtwX6jJPpDWLFgHl5eJbbudOsS0v5+qoWeLoUeCznwVuugk4dQpYvx74y1+Aq69WOjJtk88k9DITe0rCjEb9tLe3Rw1PNhqNqKysTElgRKq0ZAlw+LCYUIOro2YNWQYefxz42teAY8eAggIxBPlrX2OlWFqdSezlc87BKxddhI++/DKkt95iYk/TknSi8vvf/x5r165FZWUlkxPKLZFJCVdHVb3ubuDmm8XIHgD4yEeARx4RHWcpzc4k9qOShJ5nn8WHf/hD6GSZ/zM0LUn/lrBarfiP//iPVMZCRJQyo6PAD34AXHCBSFIKC4F77xWjepikZFBhoUjoAW0k9pyVOuOSTlQaGhrQ0NCA48ePpzIeIqIZe+klMaLnttuAkyeBT35S9EW56y7R7EOUtMhZqSkjkm76kSQJxcXFqKiogNlshmHcHNM7duyYcXCkIp2dQEMDYLdzynhShOTxiExjkvfg3/8O3Hkn8MtfitvFxcD3vgfceCP7otAMJDIr9dlnKxujhiWdqLhcLpSWlqK0tBT9/f3weDzh+yLnVSGN4No2pDCppSXue/Af/wB27BADsYaGxPfJDTeIpp7ycoUCJu2InJwu9P0WmpU6ZHg4oyHlkqQTlbYzUyKThnFtG1JaTw/w7rso7uoKT8Me+R58L1iG+9uX4f/+X9HEAwBr1ohJ3NgPhVKmpQXYvFmsWhlrVurdu5WKLCdMO1E5ePAgOjs7sXr1alx44YXpiInUIpFfEZyFmNKpogIFANYAkGO8BxcB+B7Ee/Cii4DvfAcwm8ferkQpUVcHrFgR/dkX0tEBGI1AIJD5uHLEtFptN2zYAKPRiIaGBqxatQrXXXdduuIiNeDaNqS0lhbI46ZhD70HA8hHHVrw8Y+LmWX/67+AmhomKZRmnJU64xJ+pb/3ve/B5/Ohq6sLfX19OHToEDo7O/HAAw+kMz5SEqfAJoXJm+rw2kMvxryv4fIO3PzHOvzpT2JmWSYoWU7tw345K7ViEm76aW5uRnt7e3hyN4PBgJ07d6KxsRG333572gIkldDpgGBwbKtS/f3Aa68Bhw4Bf/ubDh0dq9DcnIf+fuDECdHEHAiIIapnnSUuc+eK7bx5Y59F5eXABz4gtmefDcyfr3TJcsfoqJjr5Fe/Ap58Eph/KB9eAKPQIQ9BBKGDDkH84AcAjEpHSymj9g77nJVaMQknKj6fDytXrozaZzabccUVV6Q6JlKT0Df30qVijOdDDwFHjqjiV4QsA3/9K+B2A3/4A+D1ir6XY/IAnJuSv1VaKrrsVFaKbehSWSn6E8+bl5I/k5NkGXjjDWDvXnEu9+0TCWeIYdZCHJMXQrdsGYpu3YK83ep5D9IMZVuHfc5KrYiEE5Xi4uJ0xkFqpbJfEbIsJvP65S/Fr+133pn4mGXLgPPPByorR3Hq1N9w6aXnY9GifMybJ2pS8vNFrcqJE2KkSGh7/LgY5vruu9GXgQGgr09czixrNUFZ2VgSs2wZcO650Vu9nk0TIX6/WLXY6xU1J3/8o8g7IhUVAVdcAdTWAjU15fjT3p/hyk9/GnmzZgE385esZrDDPiUg4USFc6PkMBX8ihgcFJU5P/mJWLslZM4c4PLLRdP2RRcBK1eKpAAAAoEgnnnmDVx11Xkzmo30+HGRq0VeurvHrvf3ix+Fvb3A/v3iOSZ0wo4GbIEdHlRj3ryxxCUyiTnnHGDxYlFppaVkRpbF982hQ2OX118HDhwAuromPn7WLODSS8WInXXrRM1/qB93IAAECwq0NQ07CRz2SwlIOFHp7+/HggULJuyXZTnm/mPHjs0sMiKIWowdOwCHQyQMgGhm+exngeuuE19q6f7Omj9fLGT3kY/Evn9gIDqB+fvfgXW/eRRru/bBOucx1J+qxvvvi74zr70W/+/MmjXW0rZ4sbgsWgSUlIgkJtblrLNSW9bJBIPA+++LGpHQ5ehRUav1zjui9umdd4C33wZ8vrHzFUtFhRjRaTQCq1cDn/iE6CtEOSaRYb+U8xJOVJqamtIZB1GU06eBn/5UzCzq94t9K1YAt94qPtvU9KVWXAxceCFwob4HWHKmvf1x0d5unb8HX3Jfj3ffkXHkVBn+dnoZ/v530TR/Zi6zcPPS8DDw5pvikrgC5OV9CnPnSpg9W9QwzZmD8PXZs8VFkiZedLqx6yMj4jUfGhKX8ddPnRK1WtPpRy1JotZo+fKxy6pV4lJaOq2XmHJBlnTYp8xLOFG544470hkHUdiLL4rpz994Q9z+yEdErco//7PKpy6I094+++MmVACoAHBZnPb206eB994TfWRC/WT+8Q+xb2AguhYjdBkYEM8dHdXh+PHJazBSadassVqeBQvE6KjQCKnQ9cpKcWELDU1JxR32SR2SnkKfKNVOnQLuvltMfy7L4gvvu98FvvhFIC9P6egSMIP29tmzx/qvJGp0FDh2LIDf/vb3uPTStRgZKQjXfkRuT58WYcS6BINiW1AgkorQZfbs6Ntz5ow1N82eneTrQxSLyjrsk/owUSFV6OkBPvMZMRoEEN/3P/jBWMfYrJDh9va8PFGzUVZ2GsuXY0YdhokUpYIO+2FcKV511FyRTjni+efF58HBg8DChcBvfgP84hdZlqSMx2m2ibJT5MRzpAr8FCVFPfywGJLa2ysqHDo7gU99SumoZoDTbBOlRzqn2O/pEZP6eL3RE8+FJvuJnkmSMoxNP6SYH/0I+NrXxPW6OmDXLtEXIquxvZ0oPdI5xT4nnlM11qiQIr773bEk5etfF589WZ+khBQWcnIyolTIVE0HV4pXNVXVqNTU1MDlcikdBqVZUxPwjW+I69u2Affco50ZWYkohTJV08GJ51RNNTUqTqcTbrdb6TAozXbvBu68U1y/7z7gW99ikkJEcShR08GO8KqjijPh9/vR19endBiUZk8/DWzZIq43NAA2m7LxEJHK1dWJGo1YOjrE/anCjvCqpYpEpa2tDRs2bFA6D
EqjAwcAi0VMUvalL4naFCKihCVS0zHVyKDJ7g91hO/oAOrrxfbwYbGfFKV4HxW32w2z2ZzQY4eGhjA0NBS+PTg4CAAIBAIIBAIpjSt0vFQfVy0yWb5jx4DPfjYfp05JWL8+iJ//fBQjI2n/s5o/hwDLqAVaLx8wwzKWlCB/8WLIS5ZA/vKXIT38MKQ338RISYlYWjuCbvdu5O3bh9FHHkHwwgsnHGqq+6HTIerDSaeb8Dfi0fp5THX5pnMcSZaVHXPldDpRW1sLv9+PkpISTBbOtm3bsH379gn7n3jiCcxV0yp1FDY6Cvz7v38MBw8uQnn5+7j//hcwb542/5GJKD10gQCC+fmiQ5ssQzcyguCZqZjnvPceZg0OApKEj3372ygcGMBQcTFeuuce8dhAQDw2zv3DRUU4xeadjDt58iQ2bdqEgYEBFBUVTfpYRROV5uZmWK1WAEgoUYlVo7J06VL09vZOWdDpCgQCcLlcqKmpQYEG5ybPVPnuvluHpqY8zJ0r44UXRvDRj6btT02g9XMIsIxaoPXyAektY8GsWeHrsiRBkuXwdrx49weGh2cch9bPY6rLNzg4iLKysoQSFcWafrxeL6qnOWlPYWEhCmPMSVFQUJC2N0Y6j60G6SyfyyWGIgPAgw9KMJmUeR21fg4BllELtF4+IE1ljFgMNJR8SJEjg7ZuFbNJxrt/9+6UxqT185iq8k3nGIolKn19ffB6veEhyV1dXQAAu90Og8GA2tpapUKjFOjrE58dAHDzzcDnP69oOESkVYnMgbJlC+dIyWKKJSpmszmqE63X60VzczMaGhqUColSRJaBm24C3n4b+OAHgfvvVzoileIqrdH4etBM6XRAMDi2ne79pEqqGJ7sdDqxY8cOAIDNZuPEb1mupQVobx+bj4n9nOPgKq3R+HpQsqaaA4VzpGQ1xYcnA0BtbS2bepSUwl+y77wD/J//I65v28YfxhP09IiloiUpeu2S668XVVFlZcCyZcrGmEmJvB5nn61sjKR+Uy0GysVCs5oqEhVSWApXJf3qV4GBAXEYzjwbA1dpjZbI65GCERmUAyKTjliLgU51P6mWKpp+SAE9PSg+dEhMGZuiVUmfeko0+eTliU72+UyDJ+IqrdH4ehDRFPhVkqMKzjsPa0I3UvDL/v33gVtuEddvuw1YuTIFQWoRV2mNlsjrodGZPokoMaxRyVEju3cjmJcnbqTgl+w99wBHjgCVlWJFZEoAV2mNxteDiGLgJ0KOkjdtwgt2e+w7p7kq6V//Cvz4x+L6z34GnHVWCgLUMo5AiMbXg4gmwaYfmtHcArIM3HqrWNPnmmuAK69MU4xawhEI0fh6ENEkWKOSw4aKiyEvXjyjX7JPPw0895z4juHEbtNQWDjWN4gjEPh6EFFcrFHJYafLyjBy6BAKzjorqV+yQ0OiNgUQ2+XL0xgsERHlJNao5LoZ/JL9yU+AQ4dEJcw3vpGm+IiIKKcxUaGk9PcD994rrt97LzB/vrLxEBGRNjFRoaTcdx/g9wMXXCBmOyciIkoHJio0bUeOAD/6kbh+331iJloiIqJ0YKJC07Ztm+hIe/nlwFVXKR0NERFpGRMVmpbXXgN27xbXm5rG+uESERGlAxMVmpZvfUvMCffZzwKXXKJ0NBrX2QmsXSu2REQ5iokKJezgQeDJJ0Utyr//u9LR5IBHHwX27QMee0zpSIiIFMMJ3yhh99wjttddB3z4w8rGolk9PUBvr8gGW1vFvj17xNAqWQbKyoBly5SNkYgog5ioUEJefhn47W/FckBcHTmNKirGroc6AB09KpY3CAmtck1ElAPY9EMJCdWmfPGLwAc/qGwsmtbSAuSf+f0QSkhC2/x8cT8RUQ5hokJT+vOfgd/9TsyXcvfdSkejcXV1QEdH7Ps6OsT92Ygdg4koSUxUaErf+Y7YXn89UFWlbCw5RaeL3mYzdgwmoiSxjwpNyuMBnn1WfFfedZfS0eSIRYvESo9LlwI33gg89JCYDnjRIqUjmx52DCaiFGCiQpMK1aZs2sTalIxZsgQ4fBiYNUt8yVutwPDwtFa2VgV2DCaiFNBAnTKly6uvAr/+tfiOYW1KhhUWjn25S1L2JSkAOwYTUUqwRoXiuvdesa2tBVasUDYWykJ1deKNE1mDEtLRARiNmY+JiLIOa1Qopr/9DWhrE9e/8Q1lYyEN0FLHYCLKKH5qUEw7doha+k9/GrjwQqWjoawV6hhsMgE7d4pteXn2dQwmIsWw6Ycm8PnGug9885vKxkJZTisdg4lIMaxRyQXTnGyrqQkYHQWuuAJYvTrNsdGMSB4PLr37bkgej9KhxKeFjsFEpBgmKrlgGpNtvfkm8ItfiOusTVE/qaUFC199FdLjjysdChFRWrDpR6ummmyruDjm0+x2IBAAPvlJ4BOfyGC8lLiIc6s70+NZ19oK3HADJ1IjIs1hoqJVU0y2VQCISVIi/OMfwK5d4jprU1Qs1rnt7eVEakSkSWz60aopJtsa2b17wlO+/33g9Gng4ouBdesyEyYlIeLcSmfOqcSJ1IhIo5ioaNUUq/DKmzZF7ervB372M3H9G98Y+6FOKqTGFZa5OjIRpQkTlVyQwGRbP/kJ8P77wEc/Clx9dYbiohmTz5xTWemJ1Lg6MhGlCfuoaFmCq/C+/z7wox+J63fdxdqUrHDm3MrnnINXLroIH335ZUhvvZXZidS4OjIRZQATFS2bbLKtQCD8sJ07gb4+4Pzzxbo+lAXOnNtRSULPs8/iwz/8IXSynNk5Srg6MhFlAJt+tG6KybZOnwYeeEBcv/NOIC8vw/FR8hKZSC2dfUe4OjIRZYCiNSp+vx9tZ+aB6Orqgs/nw65du6DX65UMK6fs3q3Du+8C554LfOELSkdDKRfZd6S6OrXH5urIRJQBiiYqNpsNNpsNBoMBAFBfXw+LxQKXy6VkWDljZETCAw+ISrU77gAKChQOiFJDib4jOh0QDI5tiYhSRNGmH5/PB6fTGb5dVVWFTg5vzJgXXliCnh4JixeLvrakERUVovbEZBJ9RoCxviPV1dF9S2aKqyMTUZopWqMyvuZk//79MJvNCkWTW0ZHgSefPA8AcNttwJw5CgdEqdPSAmzeDIyMxO47EmOyv0l1dgINDWJ9hfHNR1wdmYjSTDWjfpxOJ/x+P9rb2+M+ZmhoCENDQ+Hbg4ODAIBAIIBAxCiWVAgdL9XHVQunM4i33pqPkhIZW7aMQIvF1Po5BOKUccMG4LzzUHDxxRMf/+KLwKpVUaO+JI8HusZGBHfsgByjv4lu927k7duH0UceQfDCCycGodOJpCjydgpfc62fR62XD2AZtSDV5ZvOcSRZVnb8YKhDrd/vh16vh9VqjfvYbdu2Yfv27RP2P/HEE5g7d246w9QUWQZuvXUNDh8uxsaN/4PPf/5vSodEKVbc1YU1t98OWZIgyXJ4+/wDD2CgqirqsR/ZtQuGp59G19VX47+3bAEAzHnvPcwaHAQkCR/79rdRODCAoeJivHTPPYAsY7ioCKfYvENESTp58iQ2bdqEgYEBFBUVTfpYxROV
SM3NzbDZbOju7o458idWjcrSpUvR29s7ZUGnKxAIwOVyoaamBgUa62X6zDMSPvOZfMyePYI33hjG4sXaKl+Ils9hSNwyvvkm8j/2MchLlkD+8pchPfwwpDffxMhLL4nmmp4e4NgxQJKQf/XVkI4ehbxwIUaeegqQZRRcckn4UOOTnfDfHh5WtowaofXyASyjFqS6fIODgygrK0soUVGs6cfv92PHjh1obGwMJyVmsxl+vx9utxu1MWYeKywsRGGMtu+CgoK0vTHSeWwlyDLQ1CSuX3llNxYvrtBU+WLR2jmMZUIZKyuBnh5Iob4jN98MDA+jIPT/c955Y489MxeL1Nsb3VyUnw+MjMRe+HD37oy/plo/j1ovH8AyakGqyjedYyg26sfn88Fut6Ovry+8z+/3AwDnUUmj558HXnoJKCyU8elPdykdDqXTZBPCJTJZm9oWPiSinKRYomI0GtHQ0BCeQwUAWltbYTQaOfInjb77XbG94YYgSkuHJn8wadd0VmBOYFFLIqJ0UXTUT2NjI+x2e/i23+/H3r17FYxI2zo6ALdb/GC+/fYgXntN6YhIFeJN1pbgopZEROmkaKKi1+vR0NCgZAg5JTRg6gtfEBOTMlHJcVMlIpwjhYhUQDXzqFB6vfQS8OyzYtHBb35T6WhIFRJJRCKvx1v4kIgojdjonCPuuUdsb7gBGDeNBuWyRFZgJiJSEBOVHPDCC6JvSkEB8I1vKB0NERFR4pioZLvOTmDtWrGNQZbHalNuvDG169ERERGlGxOVbPfoo8C+fcBjj8W8e98+4A9/EN0QWJtCRETZhp1ps1FPD9DbK/oUtLaKfXv2ANdfL6pQysqAZcuialPq60XfSSIiomzCRCUbRbbfhDpCHj0KRK58K8t47jngxReB2bOBxsaMRkhERJQSbPrJRglMfx5Zm3LLLcAHPpD5MImIiGaKNSrZqK4OWLEiugYlpKMDMBrxzNPAyy8Dc+cCnFOPiIiyFWtUsl2MdVgia1P+7d+AxYsViIuIiCgFmKhkq9D05yYTsHOn2JaXA4sWob0d8HqBefOAO+5QOlAiIqLkseknW8WZ/nxYKsRdd4mH3HGHGABERESUrVijks1iTH/ucABdXaK557bblA2PiIhoppioaMjgIPDtb4vr27eLph8iIqJsxkRFQ+x2MQ/cBz8opssnIiLKdkxUNOLvfwe+/31x/b77xqZZISIiymZMVDTCZgNOnQIuvxy45hqloyEiIkoNJioa8Mc/iqV+JAn40Y/G+tcSERFlOyYqWW50FPjqV8X1rVuBlSsVDYeIiCilmKhkuYcfBg4cAIqLge98R+loiIiIUouJShbr7R1bFXnbNmDhQkXDISIiSjkmKlnsjjuAY8eACy4A/vVflY6GiIgo9ZioqF1nJ7B2rdhG+MMfgN27xfXmZqCgIPOhERERpRsTFbV79FFg3z7gscfCu4aGgJtuEtfr64GPfUyh2IiIiNKM04KpUU+P6IAiSUBrq9i3Zw9w/fWALOPnLWX4n/9ZhsWLgR07lA2ViIgonZioqFFFxdj10KQoR48CJhMA4GsAboWMH/4QKCnJcGxEREQZxKYfNWppGZsDX5ajtiPIRx1acO21wMaNCsVHRESUIaxRUaO6OmDFinANSqSL0IE3Fxrx2s85Ay0REWkfa1TUTidOkSyNnarmZs6ZQkREuYGJilotWgSUlwMmE96/fydeyTfhHZTjE59dhM98RungiIiIMoNNP2q1ZAlw+DCC+bNw3TUSng5Y8aGqYby0u1DpyIiIiDKGNSpqVliI+x+Q8PTTQGGhhMedhSgqUjooIiKizGGiomL79gF33SWu//jHXBmZiIhyDxMVlfrb34DPfQ4YHRWDgLZuVToiIiKizGOiokK9vcC//Avg94vp8R98kEORiYgoNzFRUVKMBQdPngQ++1mgq0tMUPvrXwOzZysWIRERkaKYqChp3IKDp04B11wD/OlPQFER8PTTYpQyERFRruLw5EyLs+Dg0HXX447bZPzvf5XhrLOW4emngQ99SNlQiYiIlMZEJdNiLDgoHz2KwktN+CmAnwJ44RkZn/iEEsERERGpi+KJit1uBwB0dXUBABwOh5LhhEkeDy69+25IixcDl1ySugO3tACbNwMjI+GFBqUz2wDy8cZdu3H55an7c0RERNlM0T4qNpsNDQ0NaGhoCCcoNTU1SoYEABgaAn73hcex8NVXIT3+eGoPXlcHdHTEvOtvj3TgQ/fWpfbvERERZTHFEhW/3w+v1wu/3x/eV19fD7fbDZ/Pp0xQPT2Ax4OW27yo7moDAJx8qA2nXvQCHo+4fzpijOoBgEBAbEfPvPzBM9sLLphZ+ERERFqjaI1KZ2dnVFJiMBgAICp5yaiKCqC6Gjf+zIRFOAoAOOvUUcz5hAmoro7uXxInCYkyblQPALz4InDV5kV4B+XwwIQHV++EbDSJBQg5xIeIiCiKYn1U9Ho9+vv7o/a53W4AYwnLeENDQxgaGgrfHhwcBAAEAgEEQtUUMyDt3o28LVsgjYxAgug3osNY/5G7PvALXPDICK69Vsac3buRt28fRh95BMELLxw7SE8PcOwYIEnI37MHEgD5l7/EgY/U4cFdEp5+eRH+jmWoXtCN7/80D9dfCwTlGxAcHgYKC8eqW9Is9Hql4nVTK5ZRG7ReRq2XD2AZtSDV5ZvOcSRZPtOTUwVMJhPq6+thtVpj3r9t2zZs3759wv4nnngCc+fOTUkMxV1dWHP77RP2f3z2f+HN0+UoQy9mzx7FU6NXoSRwDKfm6/Ff37obEmQMFxVhfUTsMgAJQBBSOOEBgPU13airex16/XBKYiYiIsomJ0+exKZNmzAwMICiKVbbVU2iYrPZsGDBAjQ0NMR9TKwalaVLl6K3t3fKgibswAEUXHwxZJ0OUjAY3vr3dkC/7uLww0LJx/gkZNvyR/GNQ19GAUYmHHpUysd79gdR9tVNqYl1BgKBAFwuF2pqalBQUKB0OGnBMmqD1suo9fIBLKMWpLp8g4ODKCsrSyhRUXx4MgA4nU5UVVXFrUkJKSwsRGFh4YT9BQUFqXtjnH02UF4O+Zxz8MpFF+GjL78M6a23oD//bKClBfLmzZBGRsLJSWTT0GbsxhOH6vAbfBhemCYcOq+zAx8wGlMTZ4qk9LVTKZZRG7ReRq2XD2AZtSBV5ZvOMRRPVEL9UkJJit/vR19fX9x+Kmm3ZAlw+DBGJQk9zz6LD//wh9DJsug/UlcHacUKwDQxCfmvH3ZgY6URnwsAFX0ArAjXxkCnA4LBzJeFiIgoyymaqHi9Xni9XtTW1oZH/zidzilrVtIuslOrJAGzZk18TCj5OLO97DIAocqSNxcB5eWQli4FbrwReOgh4MgRjuohIiKaJsUSFb/fj3Xr1sHv98Nms0XdN1k/FcUtEkkIJktCztTKYNYskehYrUBoVA8RERElTFXDk7NCoklI5G1JYpJCRESUBMX7qGQlJiFEREQZoejMtERERESTYaJCREREqsVEhYiIiFSLiQoRERGpFhMVIiIiUi0mKkRERKR
aTFSIiIhItZioEBERkWoxUSEiIiLVYqJCREREqpXVU+jLsgwAGBwcTPmxA4EATp48icHBQRQUFKT8+ErTevkAllErtF5GrZcPYBm1INXlC31vh77HJ5PVicrx48cBAEuXLlU4EiIiIpqu48ePo7i4eNLHSHIi6YxKBYNBvP3225g/fz4kSUrpsQcHB7F06VIcOXIERUVFKT22Gmi9fADLqBVaL6PWywewjFqQ6vLJsozjx4/j7LPPhk43eS+UrK5R0el0WLJkSVr/RlFRkSbfdCFaLx/AMmqF1suo9fIBLKMWpLJ8U9WkhLAzLREREakWExUiIiJSLSYqcRQWFuJb3/oWCgsLlQ4lLbRePoBl1Aqtl1Hr5QNYRi1QsnxZ3ZmWiIiItI01KkRERKRaTFSIiIhItZioEBERkWoxUSEiIiLVyuoJ39KlubkZfr8fer0eXV1daGxshF6vVzqslLHb7eHrx44dQ1NTk4LRzJzf70dbWxva29vhcrkm3G+328Pnz+/3o6GhIcMRztxUZZzqfrVL5BwCQFdXFwDA4XBkNL5UmKyMofsAUUafz4ddu3Zl3efOdN6HNTU1mnuvut1uOBwO1NTUwGAwwOVyYfXq1aitrVUo2ulL5BzabDZUVVUBAEpLS9NePiYq49jtdlit1qgvtq1bt6K9vV3ZwFLEYrGgpqYGVqsVgEjKbDZb1iYrXq8XnZ2d8Pv96Ovrm3B/6AsuVF632436+vqs+qKbqoxT3a92U8U//v1ZX1+fdV9yiZTRZrPBYDAAEGW0WCyaKmMkp9MJt9udochSZ6oy+v1+uN1uOJ1OGAwG2Gy2rEpSEinfunXrsHfvXuj1eni9XphMpoQWFpwRmaKYzeaE9mWjrq4uGYDc398f3tff3z9hXzZqb2+XjUbjhP16vX5C2bL1bR+vjIner3ax4u/v75fNZnPUOfR4PDIAuaurK8MRzly8c2Q2m+Wmpqbw7aamJlmv12cytJSZ6n3Y398vOxyOrP0/lOX4ZWxvb8/6z1JZjl8+q9Ua9T6VZVl2uVxpj4d9VMbR6/WoqamB3+8HAPh8vvCvnGzn8/kAIKo6OXS9s7NTgYjSy+fzhZvwxsvGX3O5qrOzM/zeBRD+fwz9j2qBy+WKapLcv38/zGazghGlT1tbGzZs2KB0GJSE5uZm1NbWwufzhT9DM/E+ZdPPOLt27YLJZEJJSQkaGhpQVVWVVc0Ek4n8gB//5R35RaAV8cqk1+s19SWnZXq9Hv39/VH7Qh+QWvkBMZ7T6YTf79dMc3Mkt9ut2QQspK2tDaWlpejr60NXV1fWNquPF/o89Xq9MBgMMBgM4SbKdJ9TJirj6PV62Gw2uFwu2O12mM1mbNiwIes6tcViMBhgNpvhdrvD7aa5WLMQ+hCh7LRjxw44HA5N/E9GCnVi9Pv9sFgsmisfIMpoMBg0+0PBaDQCGEuim5ubYbFYNJF0RtbIh8rZ1NSEysrKCT8mUo1NP+OEOrS1t7ejq6sLfX19MJlMSoeVMi6XC/v370dzczOcTidKS0sBaPfXaSxMUrKXzWbDxo0bw52jtUSv18NqtYabgEpKSjT1hR5qNtCyUE1DyIYNG8I1ZFpRXV0dvh6qnU73D14mKhFCfRpC1VgGgwEejwd6vR5Op1Ph6FKnqakJVqsVtbW14X+qyDefVsRLvkK/6ii7OJ1OVFVVZeXw8sn4/X7YbLaoLzOz2ZyRL4BM8Xq9mvyMGW/890SoVkwLTevxPjP1en3ay8dEJYLP54tZ3VpfX5/5YNLE6/VG3Q41A2mxmtlgMMT9J9J6O7nWhL6wQzUpfr9fEx/+gPjcsdvtUTV9oaRFK/+XfX19cLvdsNvtsNvtsNlsAMT0AVr5ERhqsot8X4bOoxZ+GIVqi8b/3/n9/rQnoUxUIpjNZni93gnVdB6PRzNVlhaLJepXmsPh0ERnr3jNOY2NjVHldTqdWdtsMFWTVbY3acWL3+v1wuv1wmg0wufzwefzobm5OdxsmU1ildFoNKKhoSHqy6y1tRVGozErE+pYZTSbzWhoaAhfQj/+GhoasvKzNVYZ9Xr9hPMYau7KtoQz3v9iU1MTWltbw7edTifMZnO4z0q6SLKc7plasovf78eOHTuwYMGCcPtb5ARw2c7tdsPr9YZn3a2vr8/qbN/n88HpdKK1tRVerxcNDQ0TZoK02+3hMu7fvz/rErOpypjIa6Bmk8Xv9/tRWVkZs40/mz66pjpHfr8fzc3N4ceHRotk0+dOou/D0GOcTicaGhpQU1OTNQnZdM9jts38ncg5DM3cDmSufExUiIiISLXY9ENERESqxUSFiIiIVIuJChEREakWExUiIiJSLSYqREREpFpMVIiIiEi1mKgQERGRajFRIaK0aG5uRklJyZQXu90OADCZTJparoKIUoMTvhFR2kSuC+Lz+VBTU4P29vaoKbdLS0vDC3/q9fqsmaWUiDKDiQoRZYTP50NVVRU8Hk/a1wYhIu1g0w8RERGpFhMVIlKFmpoa2Gy28G2LxQK73Y76+nqUlJSgqqoKbrcbbrcbVVVVkCQJFotlwnEiHx+5QBwRZScmKkSkSn6/HzabDRaLBd3d3TAajbBYLHA4HPB4PPB4PHA6nVHJiMVigc/nQ3d3N1wuF2w2G7xer4KlIKKZYqJCRKplNBphNpuh1+tRX18Pv9+P+vp66PV6GI1GGI1GdHV1ARhbor69vR16vR4GgwFNTU1obW1VuBRENBP5SgdARBRPdXV1+HppaemEfQaDAX6/HwDCNSeVlZVxj0FE2YeJChGpll6vT2hfiNFohMfjSV9ARJRxbPohIk0wGo3wer3hGhYi0gYmKkSkCQaDAVarNdyhFgCcTmd45lsiyk5MVIhIMxwOB4xGI0wmE0pKSuBwODjTLVGW48y0REREpFqsUSEiIiLVYqJCREREqsVEhYiIiFSLiQoRERGpFhMVIiIiUi0mKkRERKRaTFSIiIhItZioEBERkWoxUSEiIiLVYqJCREREqsVEhYiIiFSLiQoRERGp1v8HAfFl2Z1qp2MAAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -1733,24 +5199,24 @@ }, { "cell_type": "code", - "execution_count": 89, + "execution_count": null, "id": "f095cf2a", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "\"0.0 * d^2u/dx0^2{power: 1.0} + 0.6334141886464264 * u{power: 1.0} + -0.4036802767762263 * t{power: 1.0, dim: 0.0} + -0.5265121291057969 * t{power: 1.0, dim: 0.0} * sin{power: 1.0, freq: 2.000000359965463, dim: 0.0} + 0.2596251993636941 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 1.9999999936069808, dim: 0.0} + 0.9225210424019804 = du/dx0{power: 1.0}\\n{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.005910228093653957}}\"" + "\"0.0 * d^2u/dx0^2{power: 1.0} + -1.0624080820721236 * u{power: 1.0} * sin{power: 1.0, freq: 1.9999996981912465, dim: 0.0} + -0.21710586567205623 * t{power: 1.0, dim: 0.0} + 0.3259807845969001 * t{power: 1.0, dim: 0.0} * cos{power: 1.0, freq: 2.000000424084055, dim: 0.0} + 0.21934994682760084 * u{power: 1.0} + 1.2864748624370526 = du/dx0{power: 1.0}\\n{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.65, 0.35]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0026895703879134727}}\"" ] }, - "execution_count": 89, + "execution_count": 27, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "eq2 = epde_search_obj.get_equations_by_complexity(4)[0]\n", - "eq2.text_form" + "eq_2 = epde_search_obj.get_equations_by_complexity(4)[0]\n", + "eq_2.text_form" ] }, { @@ -1758,12 +5224,12 @@ "id": "4a1a1f02", "metadata": {}, "source": [ - "This equation has the first order, thus only a single initial condition $u|_{t = t_{val, 0}}$ is needed, and we get it from the data." + "This equation has the first order, thus only a single initial condition $u|_{t = t_{val, 0}}$ is enough, and we get it from the data." ] }, { "cell_type": "code", - "execution_count": 90, + "execution_count": null, "id": "4e621cd9", "metadata": {}, "outputs": [ @@ -1772,54 +5238,32 @@ "output_type": "stream", "text": [ "Using explicitly sent system of equations.\n", - "dimensionality is 1\n", "grid.shape is (160,)\n", - "Shape of the grid for solver torch.Size([160, 1])\n", - "Grid is torch.Size([160, 1])\n", - "torch.Size([1])\n", - "[2023-10-27 13:26:06.629574] initial (min) loss is 314.85504150390625\n", - "[2023-10-27 13:26:06.673411] Print every 1000 step\n", - "Step = 0 loss = 314.855042 normalized loss line= -0.000000x+1.000000. There was 1 stop dings already.\n", - "[2023-10-27 13:26:13.090162] No improvement in 100 steps\n", - "Step = 175 loss = 0.681533 normalized loss line= -0.670408x+46.723266. There was 1 stop dings already.\n", - "[2023-10-27 13:26:16.941645] No improvement in 100 steps\n", - "Step = 275 loss = 0.661911 normalized loss line= 0.000372x+1.001404. There was 2 stop dings already.\n", - "[2023-10-27 13:26:21.296751] No improvement in 100 steps\n", - "Step = 389 loss = 0.673448 normalized loss line= -0.000640x+1.035446. There was 3 stop dings already.\n", - "[2023-10-27 13:26:35.747094] No improvement in 100 steps\n", - "Step = 799 loss = 0.036245 normalized loss line= -0.000170x+1.011662. There was 4 stop dings already.\n", - "[2023-10-27 13:26:42.428287] Print every 1000 step\n", - "Step = 1000 loss = 0.035423 normalized loss line= -0.000115x+1.022673. 
There was 5 stop dings already.\n", - "[2023-10-27 13:26:51.195499] No improvement in 100 steps\n", - "Step = 1254 loss = 0.035086 normalized loss line= 0.020060x+0.660700. There was 5 stop dings already.\n", - "[2023-10-27 13:27:04.285616] No improvement in 100 steps\n", - "Step = 1624 loss = 0.034638 normalized loss line= -0.001039x+2.199179. There was 6 stop dings already.\n", - "[2023-10-27 13:27:17.496981] Print every 1000 step\n", - "Step = 2000 loss = 0.033968 normalized loss line= -0.000079x+1.001089. There was 7 stop dings already.\n", - "[2023-10-27 13:27:18.136004] No improvement in 100 steps\n", - "Step = 2015 loss = 0.033847 normalized loss line= -0.009918x+2.230062. There was 7 stop dings already.\n", - "[2023-10-27 13:27:27.216756] No improvement in 100 steps\n", - "Step = 2245 loss = 0.033715 normalized loss line= 0.034518x+0.585910. There was 8 stop dings already.\n", - "[2023-10-27 13:27:38.043199] No improvement in 100 steps\n", - "Step = 2560 loss = 0.033166 normalized loss line= 0.025307x+0.268369. There was 9 stop dings already.\n", - "[2023-10-27 13:27:44.689097] No improvement in 100 steps\n", - "Step = 2756 loss = 0.033120 normalized loss line= 0.041654x+0.022290. There was 10 stop dings already.\n", - "[2023-10-27 13:27:52.201592] No improvement in 100 steps\n", - "Step = 2970 loss = 0.097098 normalized loss line= 0.003246x+0.240966. There was 11 stop dings already.\n" + "target_form shape is torch.Size([160, 1])\n", + "[2024-04-10 13:06:17.713854] initial (min) loss is 484.7067565917969\n", + "[2024-04-10 13:06:19.980834] Oscillation near the same loss\n", + "[2024-04-10 13:06:19.981127] Step = 300 loss = 1.285648 normalized loss line= -0.000000x+1.000003. There was 1 stop dings already.\n", + "[2024-04-10 13:06:20.785312] Oscillation near the same loss\n", + "[2024-04-10 13:06:20.785405] Step = 400 loss = 1.285642 normalized loss line= 0.000000x+1.000002. There was 2 stop dings already.\n", + "[2024-04-10 13:06:22.424131] Oscillation near the same loss\n", + "[2024-04-10 13:06:22.424485] Step = 600 loss = 1.285630 normalized loss line= -0.000000x+1.000005. There was 3 stop dings already.\n", + "[2024-04-10 13:06:23.342758] Oscillation near the same loss\n", + "[2024-04-10 13:06:23.342850] Step = 700 loss = 1.285620 normalized loss line= -0.000000x+1.000005. There was 4 stop dings already.\n", + "[2024-04-10 13:06:24.151050] Oscillation near the same loss\n", + "[2024-04-10 13:06:24.151148] Step = 800 loss = 1.285611 normalized loss line= -0.000000x+1.000005. 
There was 5 stop dings already.\n"
     ]
    }
   ],
   "source": [
-    "bop_u = get_ode_bop('u', 0, [None], t_test[0], x_test[0])\n",
-    "pred_u = epde_search_obj.predict(system=eq2, \n",
-    "                                 boundary_conditions = [bop_u(),],# bop_du()],\n",
-    "                                 grid = [t_test,], strategy='autograd')\n",
-    "pred_u = pred_u.reshape(-1)"
+    "pred_u = epde_search_obj.predict(system=eq_2, boundary_conditions = [bop_u()],\n",
+    "                                 grid = [t_test,], mode='NN', use_fourier = True, fft_params = {'L' : [4,], \n",
+    "                                                                                               'M' : [3,]},\n",
+    "                                 compiling_params = {'tol' : 0.005})"
   ]
  },
  {
@@ -1827,12 +5271,12 @@
   "cell_type": "code",
-   "execution_count": 91,
+   "execution_count": null,
   "id": "565e755c",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
-      "MAPE on the test dataset is 0.40879904876947004\n"
+      "MAPE on the test dataset is 1.7942529267020053\n"
     ]
    },
    {
     "data": {
-      "image/png": "[... base64-encoded PNG plot data omitted ...]
YaGvmyU9Pd1SH85K07xVKd/+FdOnT/e1WlnJhAkTfPtKq7GWX14tm3Qo/f9KM5Hdbkd9fT1sNhtcLpfO0amrrKwMhYWFyMvL8/0nCXwhWUWoREr5tkXm4nK5kJ2dbcopz73xer0oKSnp8uGUk5Oj+Zt5LLndbku+xwTq/jmhtFRZpes61HumzWbT9BotnXQEa84sKiqKfTAacrvdXX5Wulqs2JRrt9tD/oewcr+yFSkfvkoLh9frtcybucfjQXl5eZcWOCUBscr/y6amJtTU1KC8vBzl5eUoKSkBIKa0W+FLndIlFviaVJ5Dq3zBUVpyuv+/83q9miaUlk06cnJy4Ha7ezSF1dfXW6pJMD8/v8u3p4qKCksMdgrVZVJaWtrlel0ul2mb5vvqFjJ7t1Go+N1uN9xuNxwOBzweDzweDyorK31dg2YS7BodDgeKi4u7fDgtXboUDofDlMlxsGvMyclBcXGx75/yZa64uNh076/Brs9ms/V4DpXuJDMmjqH+L5aVlWHp0qW+n10uF3JycnxjPLRg6VVmvV4v5s6di+HDh/v6qgKLhVlBTU0N3G63r+JqUVGRqTNxj8cDl8uFpUuXwu12o7i4uEcVwPLyct81rlmzxnRJVl/XGM7fwMh6i9/r9SIrKytov7iZ3or6eo68Xi8qKyt991dmPpjpvSfc16FyH5fLheLiYuTm5poiuYr0OTRjtedwnkOlajcQm2u0dNJBRERExmHZ7hUiIiIyFiYdREREFBNMOoiIiCgmmHQQERFRTDDpICIiophg0kFEREQxwaSDiIiIYoJJBxEREcUEkw4iIiKKCSYdREREFBNMOoiIiCgmmHQQERFRTPx/3KXoUH54KN0AAAAASUVORK5CYII=\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAioAAAGeCAYAAAC6gypNAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAABO9UlEQVR4nO3de3xT9f0/8FfalHItacHSKZ2SemNOpimg001RUpywuS+zERXndI523vDe2k2nePnVZs6hm84W/cocotB4nzhJUNRNV9rGqfuCU5vC8A62p+Xa6/n98eGEtM3lpD3JOTl5PR+PPpLmcvL+5Hbe+Xzen8+xyLIsg4iIiMiAMvQOgIiIiCgSJipERERkWExUiIiIyLCYqBAREZFhMVEhIiIiw2KiQkRERIbFRIWIiIgMi4kKERERGZZV7wBGqr+/H5999hkmTJgAi8WidzhERESkgizL2LVrFw499FBkZETuN0n5ROWzzz5DYWGh3mEQERHRMGzfvh1Tp06NeH3CExVJkrB27VrU19fD6/UOud7tdsNmswVvW1FREdf2J0yYAEA0NCcnZ8TxKnp6erB+/XrMmzcPWVlZmm3XSNhGczB7G83ePoBtNAOztw/Qvo2dnZ0oLCwM7scjSWii4vf70dTUBEmS0NbWNuR6t9sNACgrKwMA+Hw+lJeXo7a2VvVjKMM9OTk5micqY8eORU5OjqnfdGxj6jN7G83ePoBtNAOztw9IXBtjlW0kNFFxOBxwOBzweDxhr6+urkZra2vwf6fTiZKSkrgSFSIiIjIv3Wb9BAIBSJIUHPYJ5fP5kh8QERERGY5uxbSBQCDs5TabDZIkRbxfV1cXurq6gv93dnYCEF1SPT09msWnbEvLbRoN22gOZm+j2dsHsI1mYPb2Adq3Ue12DDfrJy8vL2w9i6K6uhrLli0bcvn69esxduxYzeMJVwBsNmyjOZi9jWZvH8A2moHZ2wdo18a9e/equp3hEpVoSQoAVFVV4frrrw/+r1QNz5s3T/NiWq/Xi5KSElMXRrGNqc/sbTR7+wC20QzM3j5A+zYqIyKx6Jao2O32sJdLkhTxOgDIzs5Gdnb2kMuzsrIS8uZI1HaNhG00B7O30eztA9hGMzB7+wDt2qh2G7oV09rtdthstrC1Kk6nU4eIiIiIyGiSkqhEGs6pqqoaMMPH4/EE11QhIiIiSmiiEggE4Ha7UVtbC7/fj8rKygFrqlRUVECSJHg8Hng8HjQ2NnINFSIiIgpKaI2K3W5HRUVF1GXxQ68rLS1NZDhERESUYnSrUSEiIiKDa2oCzjxTnOqEiQoRERGF9/jjwGuvAX/5i24hMFEhIiJKV+F6TLZtA5qbAb8fWLNGXPbUU8A772Dixx+L65PIcAu+ERERUZKE9pjMnCkuO+KIg9crRzbesQNZJ52EOQBw442ALCctRPaoEBERpZNIPSZ+v7h8+XLAeqAfQ0lIDpz2Z2aid+XKpIbLHhUiIqJ0EqHHBMXFBy9vbh74/wFvuN049cILExvfIOxRISIiSierVkXsMYHVKq5XZGQMPNUBExUiIiIzijS1ePFioKEh/H0aGsT1+flAQYHoVXn4YaC4GPKUKeiaODHxcQ/CoR8iIiIzClcoO1hGBtDff/BUMXUqsHUrMGqUGB4qK0Pvnj3Yv2FDUkIPxUSFiIjILLZtA3buFMlFaKHsz34mhncmTwYOP/xgj0lhIXDZZcCjjwLbt4vLFdnZB89bLAP/TyImKkREemhqAioqALcb+M539I6GzEJNoawsh+0xQXe3bslINKxRISJKhFhLj6tZ8dMAy5dTiomnUDY7+2Ayo2OPSSxMVIiIEiFcIhLvip8GWL6cDCxcIqumUDbFcOiHiCheocM2oUWKseoDQm8bacXPrVvV1RgQxSqWjVQom2KYqBARxSvSDkJNfYDVCvT2hl3xs//RR2FVW2MARE6YyLzUFMuqKZRNIUxUiIjUULODWLUKuOSSsIkIrFZg5Upg+vToK35arbG3oVAz/ZTMRW0imyKFsmowUSEiUkPtDiJCIoKGBsDhEPUpQORu+cWLo29j0iRR58KhofSkJhkGDDO1WAsspiUiGixckWI8symAyEuPx7PiZ7htHHGE6D0pLhaJEnAwYZo5c2BCFaktlLpMWCwbCxMVIqLBws22UbuDCJOIoKDgYH2Asn5FQwNQXg40NKD344+xf/Lkg9uLto14EybOHEpNahJMAxyHJxk49ENEBKirQVFEm02hZiGtWN3y0bYRa2jI4VC/OikZV7T6I5MVy8bCRIWICFBXg7J9u7odhBb1AWq2ESlh4syh1KQ2wUyhVWW1wESFiAhQV6RolB1ErF/UagsuAc4cMpJ4EkwTFcvGYu6BLTInFgfSSER6/6itQTHCsuNh6lywdau4HIjdlu99L/wKuX6/uHzwCrmUHPHWH6UJJipkLGqSkFjFgUxkKBo1xaWpUKSoNmHSYuYQaS9Nlr/XgoE/hZSWIu1EIh0jJdwvQM5yoMHUvn9izdhJJVrOHGLyr71Y31OpkCwnCWtUKLnCFe6pKSBTM3bb1DR0G4sXHzzY25FHRo+DzEvt2L9RalC0MNKZQ6FYx6KNNFz+XgtMVCi5Bn3h7dkDjAvZiciwwAJA/moHLCFforcduQ
q3tlwCqxyhOLC39+AXaKSDvSm3DxMHmVw8xaVmKlIcycwhTnHWXhouf68F9ilR4oV0u8sHvvB2P/IUFn/LjzNymnENlqPnQM5sgTzgtAdWLMYq3PHxYsyWw4/d/umSBnx8+yrIEbqy+zMz0btyZXzDR+zqTl0c+1cn1jBXPHUs/Lyoo3bIzQgF2wbCRIW0E+nLKuQLT/5KfOGN3bsDT2wpxqb+mbgf1+LmOeF3Ilsea8Cl3sV4+mngxhvEZf0H3rZ9B05XPAIcdftilEwIv4033G7IF14Y3xcv61xSF8f+1Yk1cyieOhZ+XgYa6cwyGiDNP6mkqUFfVv39wPPPA3cesyrYY5JxoKdEOZUPfOH97ncHtjFoJzJjBuB0Aj/5CXDhteIXYMasYsh/ehg9xxdjb04BZs3PR04O0NYu7qokMLJl0Ns71hfv8uWcspmq1PSWmalQVivRfrlzivPwmWVmmUGwRoVGJsw4tvzkU3jB9jM8/mcZTdsm479YjOct09EkDy3csyiFe598EruALKQ40GKxYHS5GLutzc7GH3uAN5/Mh1RegI+7CrFCvgyXyY/im5btWLluNgp/DBTFKiAMvTzWYksAC3KNhGP/iRWujiWexcnSwbZtonD/nXei1/SwWDZuTFRoZEILYS2iEBY7duDHdxTjxwcur7pZxrWnAZiPyIV7amdbRCgOzMoCzrx4KrBoK47ePwozVlnwsz+UIfCfbnRvyMYTx8q46CJg2Y+BbyJCHPEUXAIsyDUSta+dmQplkyHaTjXOz4uluRmn3HorLFOmACefnNRmJEPWUUeJwn0gfWaWJQn7nEi9cOOuIcMpFnlgIWxfhhV7V6xCdTUw5XgV3e5aFJBlZyNnogVXXgls3mLBsy9l4vjjd6C314KVK4HTSvPRMaYA3ceHiUPN+HE8BbmkPY79J1e0OpY4n3PLqlU45P33YXniiYSHnXBh3oe9K1eiPzNT/BOrpofFsnFhokLqhRl3/e/3F+PmM8J/WWU2NmDsLw58WcUq3EsAiwUoKZFx551v4e9/78X8+cC2vqnI37cVuR824NZPytHpjRBHpPFjzoTQF8f+k0/NTjXScx6S2GesXStusmZN6s+0C/M+lC+8EG+43eFvz2R5RPhJpugi9CD0Nfrx5I3NOOvYbVjvFRcrs3Ei7iB0/BUxe7aMl14CNm4ETjwpG3v3WXDXXUDRkRbc/3A2ursP3DBWwSVnQiRfpLF/M68qmyrimeK8c6e4bOdOY8+0i5QwxdObymRZW3KK6+jokAHIHR0dmm63u7tbfu655+Tu7m5Nt2skqtoodsPiz2KRZUDuF4M8wb+Fs7bL3ZMKZHnWLFl++GFxWlAgy9u3J68xEYRrY3+/LD/zjCwfc8zBZhx7rCx7vQdusH+/uJFy4/37B260uXng86L8NTfL8tatstzUJM7n54vL8/PF/01N4voktNFUwrwHg6fKnyLWa2dQKf0aRnvOV62SZas1/OfFapXl5cvj+7w0NsryGWeI00S5+moRx9KlAy9X8T782yOPyP1Tphjyu1ALWr9P1e6/me5RdGF6EEIXY9tw6Sp4/jkVWZ9uTeqwzkhYLMDChcC//w3U1Ykffx98AJSUAC4X8N8vk3iwt1Tq7k4Gjv2nnpFMcb722vg+L1ockDTcbdT0lsToTe1duRL7J09G78cfp8x3YapgokLCMIoU29Y1YO7/Lhb76RTcQVitwJIlwH/+AyxdKvINj0fMYP5//w/o6opwRy0P9maU7m6j4Ni/ackHEno5NLFX83nR+oCk4W6j5gdGjKRLvvBCcT4FvwuNjtOTSYgw1Xb3buBPvwVuglhILRP9kC0ZsMj9mDJFv3C1ZLMB998vZl9eeSXw978Dv/61mF1ZWwucccagO4z0YG88hspAsZ6PiRMP3jbS9HYyrgOJvXzYYXh39mzM2LQJlk8/FZc7HLE/L8pOH4jvgKShnycg+nts+XJxPDC1SxPwfZhUTFTSWYwFil7/v8n42W8OR++2fFyEAuybVIj8qsswfo05FyiaMQN44w1g9WrxnfXRR6KT6dJLgd/+Fpg0KeTGIznYWzwLZaXDonIxno8sAF2PPAJ5yhRYvvlNLpKVag4k9n0WC7a9/DKOW74cGbI89DMT6fMSa72WCAckDZv8RLtNc3Pso0lzsTZdcOgnjWUddRTm3Hgjsk46KWx35+k/OwLbtgFZR0zFv1/cCvuOBoy/wdzjrhaL6BT54APg8svFZY89Jn70rV6tcqFNzhwKL9LwIsf+zS/acEisz0usOhc1n6d4PnPRZuzosMwCMVFJDxF2ENGKFHtgxU8tq3DddaLotOSH6TXuOnEi8NBDYhjoW98S+dvixcDZZwOtrTHuHOvLTONjqARX/GxuHkmTEy9S0sWx//QWz84/XBKhZuE5NbdRO72d78OkY6KSDiLsIKIVKS4uasDV/1yM++4Dxo1LRpDGdOqpYmTsrrvE99ErrwDHHQfce6/ocY5I7ZeZBjOHDLPi53BnU4Ti+hPpKdbnRW0Soeb9E+k27C0xLH4bmJXKHUR/v/hyUI44rJw+8QQwe3bywzaiUaNEce177wFz5gD79gE33SSen2F3Yox05pDWK37Guo2abQx3NkWs54MoVhKh5v2j5jbsLTEkFtOalYqCzbc29uDX95+FZ1CA7SjE60WX4aoxj2LMzu3IOow7iMGOPhp49VUxAeCGG0RPy+zZYimIO+6Is+dppDOHws2EUFb8VCjJjZqDJ8a6TaTrY83YUTubItrz0dMT+Xmk9BGtgF3Ngf54MMCUxUTFDMLNDIlSKS9brVhx6kqUz7EC+BaK81pxlzsbN1xqQYaFH95oLBYxC2jBApGgPPkkcN99wNNPix9pP/hBHBsbycyhkNc3eDDI0ATg3ntFz8pIpmzu3w+MHh19G2pmMKmZTaH2+SCKRM37h++xlMRExQzC/dqN8qv8zLEN2Pi62EE4ndvw+OOH4hvfUH6h88OrRn6+mAX005+K2UHbtolC2wsvBH7/ew1GLGJNg4zV6xJ6+UimbMa6PtbU0dD1J7j2BBENA2tUUlUcRYrKSpDKQQM7OoHjjwc2buzFVVf9K/jjmuJ39tliVtT114v97+rVIn/4859VTmWOJI7CvmGv+BnrNpdfHnsbWs6mICIKg4mK0UUqYlRTpJifj67cAmweXYxyPIwmFONLSwGuviMffj9wyikj2ZOSYvx44He/E/vlE04A2tpEJ0NJCfDxxyPYsMqZEPKJJ+Jfl18O+cQTDyYAWkzZfOih2NsIxdkURJQATFSMLtLaE1F+DctWKz64ZRVKLp2KnPat+PbeBqweX46/3d6AcTu24tJbpwbvStqZORPYtAmoqRGlHRs2iJ6re+5JUD2osuLnW29h21lnoe+tt8InACOZsqnmes6mIKIEYqISQdIW0Rru2hNRfg3//FsNmH7XYvh8gJyVjaVLLWhpAX5zmwXjJ3EHkUhZWaKu+d//BpxOUY9aVQXMmgU0NibgAUey4qea26jZBntMiCiB+Ls6AmURrb4nngBOPjn8j
WIdh0XNcVrCFcLGcywYIHiQwH5kIAP9ePc9MQPvkkuAm28Gpk1T3WzSSFERsH69eFmvuw54913xNlq6FLjzTjFclHBaTNlUO6WTsymIKEHYoxIqnkW0gNjHYYl0fawek+XLYxYx7tsHvNiQj7bsAjTJA2tQSi/Px9at4si/TFL0Y7EAF18sjhu0eLGY6LJ8uVjZdt26JAWhZsgl1m04bENEOmKPSqgBS5MrPRmDFtHaulXXtSde+HUDHvM4sL4M2Lt3KkZhK7oxCrNmWbD58jIcv7Abv7JxR2IkhxwiSop++lPgl78Ub6EFC4DzzxeJy5QpekdIRGRcTFRChS6ihQOLaOHgQfqqClbi3pAkQ4ZFpDMJWHuit1e8OIOHdW5fBrxzYDOHHw4sXpyNiy4SU2JFcsUkxajOOkvUrtx2m1hr5amnxLGD3G7g5z/n4W2IiMLhV2OoKAWqJ6EBv/tiMRZjFXoO5HdKEqMkGX0WK5pmX47+jMizcToeXIXPzliMT54O/zg3ndaAGTWLcexp+fgcBWgMGdb5HAUoOD4ft98ulm9vbQXuvltJUigVjBsnFo3dtAk48USgvR1YsgT4/vfFsYSIiGgg9qhEIGdkwNLfHzzd4APe7gIaGhaj6s3puPe1oT0os+QGvLPJgRPxC/gx9Pri3ga8c6UDuBI4EYAf4iCAmegPnm54FXgfADAVx4/fiuNOHIWZsyz4/PtlOOrkbqwrYI+JGRQXi2TlgQdED8tbb4nV5K+5Brj9dmDCBL0jJCIyBiYqgymLaB12GN6dPRszNm2C5dNPkXtMPuZPBebPh8gwikOSmQPDMz+/FHg3Exj/IYA3EByuUZIQRWYmsCsrH191F+CLrEK8fNhlOLf9UeR3bccVN+XjsJPEAfCmTcsOGQ7gsI7ZWK1iRdvzzhMzgzwecdygNWuA++8HfvKTgcceJCJKR0xUBlMW0bJYsO3ll3Hc8uXIkOWw61NYDhyHxXLgOCxX3ZEPTAXwST4wqwAZB67PfPRRyNu3o/Gf+cj4prLzmQp0bUX+qFGYYbEAspj2+QvOqEg7U6cC9fXAyy8DV10FBAJAaalYnv+PfwTsdr0jJCLSDxOVcEIPLW+xiDUkQg1j7QlLdzcyufYERaEcN6i6Wqxu+/LLYirzr38N3HQT3x5ElJ5YTDtcXHuCEmDMGOCOO0Rh7dy5Yrb7rbcC3/kO8OqrekdHRJR8TFSIDOiYYwCvVxyNuaAA+M9/ROKyeDHwxRd6R0dElDxMVIgMymIBLrhArGx71VXi/9WrgWOPBR58EOjr0ztCIqLEY6JCZHATJwJ/+IM4qOHMmUBHh0hcTj5ZLGBMRGRmTFSIUkRxMfDPf4relIkTxTEvZ88GrrsuA3v2sC6eiMzJEN9ubrcbNpsNACBJEioqKvQNiMigMjOBK64Qa6zccIMYCnrwwUysXj0XgAUXXMC1V4jIXHTvUXG73QCAsrIylJWVweFwoLy8XOeoiIytoAB44glRcHvkkTLa20dj8WIrfvAD4OOP9Y6OiEg7uicq1dXVKCsrC/7vdDpRV1enY0REqcPpBPz+XlxwwRZkZ8tYvx749reBO+8Eurr0jo6IaOR0TVQCgQAkSQoO+4Ty+XzJD4goBY0eDSxa9CH8/l6UlIgE5Te/AWbM4NorRJT6dK1RCQQCYS+32WyQJCnsdV1dXegK+anY2dkJAOjp6UGPspqsBpRtablNo2EbzUFp2xFH9OCvfwXWrrXgppsy8eGHFsydC1xwQT/c7j5MmaJzoMOUTq8h25i6zN4+QPs2qt2ORZZlWZNHHAafz4eSkhIMDqGoqAiVlZUDhoQUt99+O5YtWzbk8tWrV2Ps2LEJi5UolezZY8UTT0zHyy9PgyxbMHZsDy6+eDPmzdsacqBLIiL97N27FxdeeCE6OjqQk5MT8XaGTFRyc3NRU1MTNlEJ16NSWFiInTt3Rm1ovHp6euD1elFSUoKsrCzNtmskbKM5RGtjc7MFV1yRiXfeEVOBZs3qx4MP9uGEE3QIdJjS/TU0C7O30eztA7RvY2dnJyZPnhwzUdF16Mce4bCwkiRFvC47OxvZYY6bk5WVlZA3R6K2ayRsozmEa+PJJ4uF4h56SBzcsLExA9/9bgZuuknUsYwZo1Oww5Cur6HZmL2NZm8foF0b1W5D105gu90Om80WtlbF6XTqEBGR+WRmAldfLZbid7nE0vv33CMOdPjGG3pHR0QUne6j1VVVVQNm+Hg8nrBDPkQ0MoceCqxdCzz7LPCNbwAffQScfrpYQO5ATToRkeHonqhUVFRAkiR4PB54PB40NjaitrZW77CITOt//gfYvBlYskT8/6c/AccdB7z0kq5hERGFZYgl9EOXzC8tLdUxEqL0YLMBdXXi6MxLlgAtLcAPfyj+v/9+4JBD9I6QiEjQvUeFiPRzxhnAe+8BN94IZGQATz4JTJ8uhoiIiIyAiQpRmhs7Fvjtb4GGBrGa7ddfA4sWid6Vr7/WOzoiSndMVIgIADBzJtDUJKYtZ2YCTz0ljhu0bp3ekRFROmOiQkRBWVnAsmXA228Dxx4LfPEFsGCBqGPZtUvv6IgoHTFRIaIhZs0C/H7guusAiwV45BExLLRxo96REVG6YaJCRGGNGQPcdx/w2mvAEUcAW7eK4tvrrgP27dM7OiJKF0xUiCiq008XM4OUdVeWLwccDuBf/9IzKiJKF0xUiCimCRPEuisvvSRWtf3gA+Ckk4Df/x7o79c7OiIyMyYqRKTa/Pmid+Wcc4DubuD668VlX3yhd2REZFZMVIgoLpMnA889J47IPHo08Mor4gCHnMZMRInARIWI4maxAJdfLtZdOf544KuvxDTma68F9u/XOzoiMhMmKkQ0bMcdB2zaBCxdKv6//35Ru7J5s75xEZF5MFEhohEZPVokKH/9qziY4XvvAcXFQG0tIMt6R0dEqY6JChFpYsECkaScdZYY/vnlL4HzzgMkSe/IiCiVMVEhIs0UFIii2t/+FrBaAY8HOOEEsSQ/EdFwMFEhIk1lZAA33gj84x+A3Q5s2wZ8//vAPfdwzRUiih8TFSJKiNmzxfGCzj8f6OsDqqrEsBDXXCGieFj1DoCIzGviRGD1aqCkBLjqKsDnE2uuPP64SFooMWQZ+PJLoKUFaG0FduwA2tqAjg7Rq2WxAJmZQG4uMGkSkJ8PHHmk+MvJ0Tt6ooGYqBBRQlkswM9/Dnz3u6J35b33gB/8ALjpJuCuu4BRo/SOMPV1doojW7/1lljbpqlJJCXDcdhhojfspJOA004T5zMzNQ2XKC5MVIgoKaZPB/75T1G/8tBDouD29deBJ58UtSwUnw8/FMXKL70ENDSI4bVQGRlAYaF4bgsKRO+JzXYw6ejpAdrbRU/Lp58CH38sFu779FPg2WfFHwDk5QHz5gELFwI/+pE4qjZRMjFRIaKkGTMGePBBwOkUvSybNgEnnigOeLhokd7RGd/nn4thsyeeAN5/f+B1Rx0FzJkjekBmzhSJYXZ2fNvv6ADefVckPm+/Dbz2mkhknnpK/E2YAJx7LlBWBpx8
sugtI0o0JipElHQLFwIOB7B4sZgddP75on7l/vuBsWP1js5YZFk8N3/4g5j6rfScWK0i4Vu4UNT7HH74yB9r4kQx3HPaaeL/3l6RtLz4okhUtm0DVq4UfzNnihWJzz8fyMoa+WMTRcJZP0Ski8MPF3UVt9wifpk/8ojY+Q3uKUhX3d3An/8s1qGZN08kC319wCmniOfqq6+Al18WvRtaJCnhWK3AqaeKqeWBAPDGG8All4iemqYm4OKLgWOOAf73f8VQElEiMFEhIt1YrcCdd4oeg298A9iyRQxd/OlP6bv8fns7UFMDTJsmkoL33gPGjRO9F1u2iB6oyy4TNSfJlJEh1sN57DFg+3bg7ruBKVPErKLLLgOOPRZ45pn0fd0ocZioEJHuzjxT1EbMny+W37/iCqC0VNRHpIs9e6xYtiwDhx8O3Hwz8NlnInm75x6RGNx/v0gGjOCQQ4Bf/Ur0stx7r5jeHAiI+pUzzxTJFZFWmKgQkSEccogY3rjvPlHz8Mwz4ujML75o7orNPXsAtzsD5eUluPvuTOzaBRx/vBj22boVqKxMfu+JWmPHAjfcIJKUW28VB6jcuFHUH/3qVyLpJBopJipEZBgZGcB114n1QI49Vqxie+65Vvz+9w58/bXe0Wlr/37RS2K3A7fckondu0fh2GNleDzAv/4l6j9SZY2ZceOAO+4APvhA9Kr09QHV1WJG1z//qXd0lOqYqBCR4cycCbzzjuhNyMiQ8frrhTjhBCuee07vyEaup0dMxz7qKODaa0VRrN0u49prm/HOO70491yRsKWiww8Xa7s884xYu+WDD4DvfU/Uswxe54VIrRT9OBCR2Y0eLeoz3nyzD1On7sKXX1qwcCFwwQWpebygvj7gL38RPUXl5cAnnwBTp4qk5f33ezFnziemWQF24UJg82Yx/byvT8zsmj8/E21tcS7sQgQmKkRkcLNmybjvvo246aY+ZGSI9TyOOUasK9Lbq3d0sfX3A08/DcyYIYZzAgFRfHr//cBHHwFLlphzHZLcXGDVKlFrM24c8NprGbjhhjloaDB3zRFpj4kKERneqFH9uPvufjQ0iGGhzk4xXbe4GFi/Xu/owuvrE4cH+M53xAymzZvFzru6WiQrS5eKXiOzu/hioLkZOO44Ge3tozF3bib+/Ge9o6JUwkSFiFLGzJmiOPPhh8VO/733xKqsZ50lClCNoLtbLIA2fTpw4YXAv/8tlp7/zW/EmiM33yx6GNLJMccAb77Zi5NP/gzd3RZccomoP+KaK6QGExUiSimZmaLG46OPxAyhrCzRq3LiicA55wCNjfrEtWuXGI468kixANpHHwGTJokF7f77X2DZMrFEfboaPx6oqGjELbeIqlq3WyxoxxVtKRYmKkSUkiZNEmuubNkiei4yMsQ6LLNnA2ecIWafJGMn+P77wFVXAYcdJoZztm8XM17uvVesg3LLLeKoxSReo9/8ph+PPSYSzscfB378Y7GWDFEkTFSIKKUVFYmjCW/ZAvzsZ2IHuHEj4HIBRxwB3HST6GXRcpihtRX47W9F/cmMGeKI0Lt2iRk9Dz0krr/hBtGLQENdcgnw/PPiaNovvwz88IdMVigyJipEZApHHy2O6tvaCvz612JmzWefiZ6N2bPFwmpLlgCrV4teD7WJiyyLoZtnnhFDTcceK7ZVUSFqZLKygJ/8RByvaPNm4PLL06NIdqQWLBDP2YQJIrFcsIDJCoVn1TsAIiItFRYCd90lildfeglYs0YMCW3dKo46/Mgj4nY5OaLgddo0UZiblyeGJrq7xaqxn30mEpSWFmDnzoGPkZkpFjK78EIxoycvL+nNNIVTThH1RWedBbz+ujjW07p16VdsTNExUSEiUxo1Siw8tnCh+KW+cSPw2mvAq6+KnpDOTqChQfzFkpkpjr8zezZQUgI4naw70crJJ4tkZd484I03xJDd88+bc20ZGh4mKkRkeuPGiaGFBQvE/11dwMcfi6GaTz8F2tvFkZr7+4HsbPFXUCB6Zw4/HPjWt0Q9BSXGSScBf/sbMHeuqFn5+c/FQnGpeigB0hYTFSJKO9nZ4sjMxx2ndySk+O53xQq+55wjVrSdPBn4/e/1joqMgPkqEREZwtlnA489Js4vXy5mUxExUSEiIsO46CJxMEoAuOYawOvVNx7SHxMVIiIylIoKcYygvj5RXPvBB3pHRHpiokJERIZisQB1dWL6ckeHqFvp7NQ7KtILExUiIjKc7Gzg2WfFzKuPPhLHT+JBDNMTExUiIjKk/Hxg7VqxporHAzzwgN4RkR6YqBARkWGdfLI4DAIA3Hgj8Pbb+sZDycdEhYiIDO3qq0VRbW8vcMEFom6F0gcTFSIiMjSLRRyjado0YNs2YOlSvSOiZGKiQkREhpeTA/zlL2JZ/ccfFzUrlB6YqBARUUo49VTg5pvF+fJycYRrMj8mKkRElDJuuw1wOMRBJJcs4ZTldMBEhYiIUsaoUeKghaNGAevWAU89pXdElGhMVIiIKKVMnw7ccos4v3QpsHOnvvFQYjFRISKilFNZCXz72yJJuf56vaOhRGKiQkREKWfUKDFl2WIRs4HWr9c7IkoUJipERJSSTjrp4JoqV18NdHfrGw8lBhMVIiJKWXfcAUyZAnz4IbB8ud7RUCIwUSEiopSVkwPU1Ijzd97JtVXMiIkKERGltJ/+VBy8cPduoKJC72hIa0xUiIgopWVkAH/8oyisfeIJ4B//0Dsi0hITFSIiSnnFxcBll4nzN93EFWvNhIkKERGZwrJlwJgxwNtvA889p3c0pBUmKkREZAqHHnpw8beqKqC3V994SBtMVIiIyDQqKoDJk4H//Ad49FG9oyEtJDxRkSQJdXV1KCkpCXu92+1GXV0d6urq4Ha7Ex0OERGZWE4OcOut4vzttwN79ugaDmkgoYmK3+/H2rVrIUkS2trahlyvJCZlZWUoKyuDw+FAeXl5IkMiIiKT++UvgWnTgC++AP70J72joZFKaKLicDhQVlYGu90e9vrq6mqUlZUF/3c6nairq0tkSEREZHKjRh3sVXG72auS6nSrUQkEApAkCTabbch1Pp8v+QEREZFpXHQRYLcDO3YADz2kdzQ0EromKuHYbDZIkpTcYIiIyFSystirYhZWvQMYLC8vL2w9i6KrqwtdXV3B/zs7OwEAPT096Onp0SwOZVtabtNo2EZzMHsbzd4+gG1MlEWLgLvusqKlxYIHHujDjTf2J+yx+BoOf3uxqE5UPB4P1qxZE/N2VVVVcDgcajc7RLQkBRB1LcuWLRty+fr16zF27NhhP24kXq9X820aDdtoDmZvo9nbB7CNibBgQSEeeMCBe+7pRVGRF9nZfQl9PL6G6u3du1fV7SyynPiFhj0eD6qrq9Hc3By8LBAIoKioCIMf3mKxwOv1wul0ht1WuB6VwsJC7Ny5Ezk5OZrF3NPTA6/Xi5KSEmRlZWm2XSNhG83B7G00e/sAtjGRenuBb3/bikDAguXL+3DFFYnpVeFrGL/Ozk5MnjwZHR0dUfffug392O122Gw2BAKBIbOCIiUpAJC
dnY3s7Owhl2dlZSXkzZGo7RoJ22gOZm+j2dsHsI2JeTzghhuAK68Eli/PxJVXZsKawD0fX8P4tqNGUoppIw3nVFVVDZjh4/F4BkxXJiIiGqlLLwUOOQTYuhWor9c7GopXQhOVQCAAt9uN2tpa+P1+VFZWwuPxBK+vqKiAJEnweDzweDxobGxEbW1tIkMiIqI0M2YMcPXV4nxNDY+snGoSOvRjt9tRUVGBioqKiLcJva60tDSR4RARUZq68kqRpLz7LrB+PXDWWXpHRGrxoIRERGR6eXnAkiXiPA8rl1qYqBARUVq47jogMxN49VXg3//WOxpSi4kKERGlhW9+E1i4UJx/4AF9YyH1mKgQEVHaWLpUnK5aBXz9tb6xkDpMVIiIKG1873vACScA+/YBjz6qdzSkBhMVIiJKGxbLwV6VBx8UK9eSsTFRISKitHLBBcDkycB//ws8/7ze0VAsTFSIiCitjB4NlJeL8w8+qG8sFBsTFSIiSjtlZUBGBvDaa8CHH+odDUXDRIWIiNLON78JnH22OL9ihb6xUHRMVIiIKC0px8BduRLo6tI1FIqCiQoREaWl+fOBww4Ddu4Enn1W72goEiYqRESUlqxW4Be/EOdra/WNhSJjokJERGnrsstEUe3GjSyqNSomKkRElLYKC8UQEAA88oi+sVB4TFSIiCitXXaZOF21iivVGhETFSIiSmvz54uVaj//HPB69Y6GBmOiQkREaW3UKODCC8X5lSt1DYXCYKJCRERp75JLxOnzzwPt7bqGQoMwUSEiorR3wgnAjBli4bc1a/SOhkIxUSEiorRnsRzsVeHwj7EwUSEiIoKoU8nMBBoagC1b9I6GFExUiIiIAEyZcvBAhatX6xsLHcREhYiI6IDFi8Xp6tWALOsbCwlMVIiIiA740Y+AceOAQADYtEnvaAhgokJERBQ0bhzw4x+L8xz+MQYmKkRERCGUxd/WrAH6+vSNhZioEBERDTBvHjBpEvDll8Brr+kdDTFRISIiCpGVBbhc4jyHf/THRIWIiGgQZfjn6aeB/fv1jSXdMVEhIiIa5NRTgalTgc5O4JVX9I4mvTFRISIiGiQjAygtFec9Hn1jSXdMVIiIiMJQ6lReeEEcrJD0wUSFiIgojJNPBg47TAz/rF+vdzTpi4kKERFRGBkZwLnnivMc/tEPExUiIqIIlOGf55/n8I9emKgQERFFcMopwDe+AXR0AD6f3tGkJyYqREREEXD4R39MVIiIiKJQhn+eew7o6dE1lLTERIWIiCiKU08F8vMBSQJef13vaNIPExUiIqIoMjOBc84R5597TtdQ0hITFSIiohj+53/E6XPPAbKsZyTph4kKERFRDHPnAuPGAZ9+CjQ36x1NemGiQkREFMPo0cDZZ4vzHP5JLiYqREREKixcKE6ffVbfONINExUiIiIV5s8HrFZg82bgww/1jiZ9MFEhIiJSwWYDzjhDnH/+eV1DSStMVIiIiFQKnf1DycFEhYiISCVlPZW33wZ27NA3lnTBRIWIiEilqVOBE04Qa6m8/LLe0aQHJipERERx+NGPxOmLL+obR7pgokJERBSHH/5QnL7yCtDdrW8s6YCJChERURxmzgSmTAF27QLefFPvaMyPiQoREVEcMjKABQvEeQ7/JB4TFSIiojgpwz8vvsiDFCYaExUiIqI4lZQAo0YBgQDwwQd6R2NuTFSIiIjiNH78wVVq//pXfWMxOyYqREREw6BMU37pJX3jMDsmKkRERMNw9tni9B//EDOAKDGYqBAREQ2D3Q4cdRTQ2wu8+qpF73BMi4kKERHRMP3gB+J0/XomKonCRIWIiGiYDiYqGZymnCBMVIiIiIbp9NOB7Gxg2zYLPv10vN7hmBITFSIiomEaNw447TRx3u/P1zcYk2KiQkRENALK8M877zBRSQQmKkRERCOgJCr/93+TsW+fvrGYkTXRD+B2uwEALS0tAIDa2toh19tsNgCAJEmoqKhIdEhERESamT4dKCyUsX17Jt54Qw4eB4i0kdAelcrKSlRUVKCioiKYoJSUlASvV5KYsrIylJWVweFwoLy8PJEhERERacpiAebNE1N+OE1ZewlLVCRJgt/vhyRJwcvKy8vh8/kQCAQAANXV1SgrKwte73Q6UVdXl6iQiIiIEmLevH4AwCuvsKJCawl9RpuamoJJCQDY7XYAIokJBAKQJCk47BPK5/MlMiwiIiJNnXmmjIyMfnz4oQWtrXpHYy4Jq1Gx2Wxob28fcJmSgNjtdjQ1NUW8X2gvzGBdXV3o6uoK/t/Z2QkA6OnpQU9PzwijPkjZlpbbNBq20RzM3kaztw9gG81g7NgeHHtsBzZvnox16/pQVtavd0ia0/o1VLudhBfThqqurkZtbW3YXhRFXl4e2traom5j2bJlQy5fv349xo4dq0WYA3i9Xs23aTRsozmYvY1mbx/ANqa6E088Cps3T8bjj3+FqVM36R1Owmj1Gu7du1fV7VQnKh6PB2vWrIl5u6qqKjgcjiGXV1ZWYtGiRQNqUsKJlqQo27/++uuD/3d2dqKwsBDz5s1DTk5OzPjU6unpgdfrRUlJCbKysjTbrpGwjeZg9jaavX0A22gGPT09CAQ24YknvoXNmwvgdM7HqFF6R6UtrV9DZUQkFtWJSmlpKUpLS4cVjMfjQVFR0YAkRalXGUySpIjXAUB2djays7OHXJ6VlZWQN3+itmskbKM5mL2NZm8fwDamuiOO6MCUKTK+/NKCTZuycMYZekeUGFq9hmq3kfDyZKUuRUlSlEJau90Om802oNhW4XQ6Ex0WERGRpjIygJISMU35b3/TORgTSWii4vf74ff74XA4EAgEEAgEUFdXh7y8PABiGCd0ho/H44k5NERERGRUyjRlJiraSVgxrSRJmDt3LiRJQmVl5YDrlNVnKyoq4Ha74fF4AACNjY1DVq4lIiJKFU6nDIsFeO894LPPgEMP1Tui1JfU6cnhhC6ZP9waGCIiIiOYPBmYNQvYtAl45RXg0kv1jij1cQk9IiIiDc2bJ05NPBM7qZioEBERaUg5pJ3XC/Sbb923pGOiQkREpKGTTwbGjwd27gTefVfvaFIfExUiIiINjRqF4Boq69frG4sZMFEhIiLSWOjwD40MExUiIiKNKQW1b74JqDykDUXARIWIiEhjRx8NFBYC3d0iWaHhY6JCRESkMYvlYK8K61RGhokKERFRArBORRtMVIiIiBJg7lzRs/L++8Dnn+sdTepiokJERJQAkycDxcXiPHtVho+JChERUYJw+GfkmKgQERElSOhxf2RZ31hSFRMVIiKiBPnud4GxY4EvvxS1KhQ/JipEREQJkp0NzJkjznOa8vAwUSEiIkog1qmMDBMVIiKiBFLqVN54A9i3T99YUhETFSIiogSaPh049FBg/37g73/XO5rUw0SFiIgogUKX0+fwT/yYqBARESUYj/szfExUiIiIEmzuXHH67rtiqjKpx0SFiIgowfLzgRNPFOd9Pn1jSTVMVIiIiJKA05SHh4kKERFREoTWqXA5ffWYqBARESXBqacCo0cDn3
8O/N//6R1N6mCiQkRElASjRwOnny7Oc/hHPSYqREREScJpyvFjokJERJQkSkHt668DXV36xpIqmKgQERElybe/DRQUiGP+/OMfekeTGpioEBERJYnFwmnK8WKiQkRElESsU4kPExUiIqIkcjrFqd8P7NihbyypgIkKERFREhUUADNmiPMbNugbSypgokJERJRkSp0Kh39iY6JCRESUZEqditfL5fRjYaJCRESUZN//PpCdDXzyCfDBB3pHY2xMVIiIiJJszBiRrACcphwLExUiIiIdcJqyOkxUiIiIdKAU1G7cCHR36xqKoTFRISIi0sGMGUB+PrBnD/D223pHY1xMVIiIiHSQkXFw8TcO/0TGRIWIiEgnodOUKTwmKkRERDpRelSamoCvv9Y3FqNiokJERKSTww4DjjtOLPr26qt6R2NMTFSIiIh0xGnK0TFRISIi0pEyTZnL6YfHRIWIiEhHp50GjBoFbNsGfPSR3tEYDxMVIiIiHY0bB5x6qjjP4Z+hmKgQERHpjNOUI2OiQkREpDOlTuW114CeHn1jMRomKkRERDo78URg0iRg1y6goUHvaIyFiQoREZHOMjIO9qqwTmUgJipEREQGEDpNmQ5iokJERGQASqKyaRPQ3q5vLEbCRIWIiMgACguBY48F+vu5nH4oJipEREQGwWnKQzFRISIiMggW1A7FRIWIiMgg5swBsrKA1lagpUXvaIyBiQoREZFBjB8PfPe74jx7VQQmKkRERAbCOpWBmKgQEREZiJKobNgA9PbqG4sRMFEhIiIyEIcDyM0FOjvFmirpjokKERGRgWRmHpz9s26dvrEYgVXvAJJJlmX09fWhV0VfWk9PD6xWK/bv34++vr4kRJd8bKM5mL2NZm6f1WpFZmam3mGQAZ1zDrB2LfDCC8Bdd+kdjb7SIlGRZRmSJGHHjh2qv+hkWUZBQQG2b98Oi8WS4Aj1wTaag9nbaPb2ZWZmIi8vT+8wyGDOPlv0rLz/vpiqPG2a3hHpJ6GJiiRJWLt2LQCgpaUFgUAAK1asgM1mC97G7XYH/5ckCRUVFZrH8cUXX0CSJOTk5CAnJwdWqzXmF15/fz92796N8ePHIyPDnCNkbKM5mL2NZm2fLMvo7e1FZ2cnvvrqK0yYMEHvkMhA8vKA730PeP114MUXgaVL9Y5IPwlNVCorK1FZWQm73Q4AKC8vh8vlgvfAnCu32w0AKCsrAwD4fD6Ul5ejtrZWsxj6+vrQ0dGBQw45BJMnT1Z9v/7+fnR3d2P06NGm+nIMxTaag9nbaPb2TZgwAaNGjcLu3bvR19eHrKwsvUMigzjnHJGovPBCeicqCf3UBwIBeDye4P9FRUVoamoK/l9dXR1MUgDA6XSirq5O0xh6enogyzLGjRun6XaJiLQyduxYZGZmqqqfo/Txox+J09dfByRJ11B0ldBExev1DhjKaWxshNPpBCCSGEmSBgwDKXw+n+axmHFsm4jMQfl+kmVZ50jISI46Cpg+Xayl8re/6R2NfpJWTOvxeCBJEurr6wGIRCUcm80GKUrq2NXVha6uruD/nZ2dAETPSU9Pz5DbKz0q/f396O/vVx2v8oWh3NeM2EZzMHsbzd4+4GAbe3t7w36PmYHSLrYvPgsWZGDLlkw891w/zj1X31lvWrdR7XYSnqgoBbWSJMHlcoXtQQmVl5eHtra2iNdXV1dj2bJlQy5fv349xo4dO+Ryq9WKgoIC7N69G93d3XHHv2vXrrjvo5fbbrsNDzzwAJ599lnMmTNH9f1SqY1z5szBCSecgOXLl8d1v5G2ceHChZgxY0bY9148hhu/GvG2cePGjbjuuuuwdetWLF26NK62LVy4EHPmzME111wTb5jDpsf7NJGvVyjlu+mtt94y/fCP1+TrwmvdvsmTcwGchhdf7MMLL7wMq1X/Xjet2rh3715Vt1OdqHg8HqxZsybm7aqqquBwOIL/22y2YB1KXV0dcnNz0draGvH+0ZIUZfvXX3998P/Ozk4UFhZi3rx5yMnJGXL7/fv3Y/v27Rg/fjxGjx4dM36FLMvYtWsXJkyYkDLDRo8//jjsdjvWrVuHc845J+btU7GNv/rVr2Cz2cK+1uFo1cbMzExkZ2erftxI4o1fjeG0UZIkXHrppfB6vXA4HMFZcWplZmZi9OjRmrYjEj3fp4l4vcLZt28fAOCUU07B+PHjE/pYeunp6YHX60VJSYkpC4YT1b6zzgJ+9zsZO3ZkYezY+XA69UtUtG6jMiISi+pEpbS0FKWlpaoDkCQJ1dXVqKqqCvaiOJ1OSJIEn883IJkZfD9lllA42dnZyM7OHnJ5VlZW2Ceur68PFosFGRkZcc0YULqYlfsand/vR15eXnCmlZqi5MFtVGZdtRjg2OKRYjnvvPPi2o5Wr6PFYolrG1rFr8Zw2vjqq68iLy8PM2fOBICI63hEake8z8dIJOOzmMzXKxwlAbNarabciYeK9F1tFlq3LysLWLgQqKsDnn3WirPP1mzTI4hJmzaq3UbCvmUCgQDcbveAHhKl9sRms8Fut8Nms4WtVVEKbkm92tpaOJ3OAckgERGlPpdLnD77bHoepDBhiYrD4UBFRcWA3pE1a9bA4XAEE5GqqqoBO1SPxzNgujKpt3btWrhcLtjtdtjt9rBr0ZSUlKCysjL4v9/vR25uLgDA5XKhpKQEgUAg+Gs5tKi5srISRUVFyM3NRXl5+YDtulwuuN1ulJeXIzc3F0VFRfD5fPD5fCgqKoLFYoFL+aQd4PF4UFxcDIvFgqKiogHT2KPFMrgNgFiPR3mc4uLiqElaZWUlcnNzw942WhvVPJfKr+J44x/Oc6tGpO1WVlbC5XIF44vU1ljvia+//jpqXKHXqV12INp9zjvvPOTm5qK4uBh1dXUDhoGivR6Atu+3RL1eRJHMmQNMmgTs3CmmKqebhPbbVlVVwe12B/8kScKGDRuC11dUVECSJHg8Hng8HjQ2Nmq62Fs0sgzs2WOcv5HMSvT5fJAkKZgAlpaWDvgiVqO+vh719fWw2+2QZRmyLAeH7FwuF/x+P7xeL1pbW9HW1oYS5YhZED1lys6vtbUVDocDLpcLtbW1aG5uRnNzMzwez4AdT1tbG1asWAFZllFbWxt8jFixDFZeXo41a9agvr4e7e3tqKmpiThrzOfzwePxoLW1FbIso6amJjjkEauNWj2Xgw3nuY2VRMXabk1NzYD4In3mYrXD7XZHjEtJhFpbW+H1elFZWRl8faPFHOk+CxcuxDvvvIMNGzZgw4YNcX9PaPV+S9TrRRSN1SqGfwDgwMTZ9CKnuI6ODhmA3NHREfb6ffv2yZs3b5b37ds34PLdu2VZpAfG+Nu9e/jPQWlpqex0OoP/Nzc3ywDk+vr6AbdzOp1yRUVF8P/GxkYZgNzX1yfLsizX19fLdrt9wH2UbbW3tw+43GazyV6vN7hdh8MRvM7r9coAgtfLsiw7HI4Bjz2Y3W6Xa2pqgv+Hi2VwG9rb22UAcktLS8Tt9vX1ye3t7XJfX59cX18v22y2IbdR28bQ+Af/r2wjnvhH8txGaqPa7
UaKb7Bo7YgUV0tLy5DHr62tjfr6R7vPRx99JAOQGxsbB8QV+jzEej0GG877TavXK5w9e/bITU1NcmdnZ8zbpqru7m75ueeek7u7u/UOJSES3b5XXhH7ikMOkeWenoQ8RExatzHW/lth/CpRisnj8QwYWnE4HLDZbJr0TjU1NQXriULNnDlzwBQ1pSgTOFiYGXqZ3W4f0tNRV1cHl8uF4uLiiOvqROPz+YL1Tmo4nU7k5eXBYrGgpKQk2Oukto1aG8lzq8V2RypSXEpPxbRp05Cbm4vc3NyYPSrR7uP3+zFx4sQBBfhqX/NQI32/Jer1IlLjjDPE8X927ADefFPvaJIrLY6eHM7YscDu3ZGv7+/vR2dnJ3JycpIysyHMEjCqKDtbZbaPQimojbT6r1rRFt8LFe4xoj1ucXFxcJaS0+lEcXHx8AKMg81mQ0tLC+rq6uD1euFyuVBTU5Pwx41kJM+tFtsdqWhxORwONDc3x7W9SPdRDmw6Elq83xL1ehGpocz+efRRMfxzxhl6R5Q8adujYrEA48YZ52+4y0PU1taitLQU7e3tA/6UL/xoX/Kx1qwBRC+EcriDUE1NTZg1a9awYg4EAsFx/pHM8FLW/4j313FZWRnq6+tRW1uLNWvWaNJGNc/lYIl4bhO5XbUcDgf8fn9cCVO0+9jtdnR0dAx4nWM936HXa/V+0/t5JVI6zp9+Or1m/6RtomIGSq9JuGI9h8MBh8MxYPjHbrcHu9gDgQCqqqoG3Mdutwe/iH0+HwKBQHCW1ty5c4PXKbOL4llXJ5TSHa4U13o8niHDAuFiGcxut6OsrCxYhKkUZg+epaHweDzBom5JkuD1emG324fVxsHP5eDHVBN/Ip5brberph3h7hP6ugAHn/vh3MfhcOA73/kOFi1aFIwl3PMd6fXQ6v2WqNeLSK0zzwQmTwa++gpIp8lkTFRS2Nq1a2G32yP+SiwvLx/wK7W8vBxNTU3BaZVLlizBEUccEby9ktxMmzZtwJCI8ku0uLgY06ZNQ15eXtzd+qFsNhsqKiqCUziV7Yd2mUeKZTBl/ZiSkhLk5uaitrYWixYtCntbu90Or9cbrIOQJAkrVqwYVhsHP5fl5eUD6ibUxq/1c6v1dtW2Y7Da2lo4HA4UFxcHX5dYvRnR7vP8888Hpya7XK4hr3G010PL91uiXi8iNbKygAsuEOf//Gd9Y0kmiyyn9uE6Ozs7MXHiRHR0dERcQr+1tRXTpk2Lawn9ZNeo6IFtNAeztzFc+/x+P4qLi01ztOG9e/diy5YtOProozFhwgS9w0mInp4erFu3DvPnzzflyrTJal9zMzBzJjB6NPDFF8DEiQl7qCG0bmOs/bfCfN9qREREJuVwAMcdB+zfD2hQZ54SmKgQERGlCIsFuPhicT5dhn+YqBAREaWQiy4CMjKAf/wDMMAxZBOOiQoRpRyHw2Ga+hSieB16KKAcueHxx/WNJRmYqBAREaWYn/1MnK5caf41VZioEBERpZiFC8WaKv/9L/Dii3pHk1hMVIiIiFLM6NFAWZk4/8AD+saSaExUiIiIUtDllwOZmcDGjcB77+kdTeIwUSEiIkpBU6cC554rzv/hD/rGkkhMVIiIiFLU0qXidNUq4Ouv9Y0lUZioEBERpahTTgGKi8VKtQeOu2k6TFRSmNvthsViQW5uLnJzc2GxWFBUVITKysohh6KPxufzoaioCBaLJeKRh/VQXFwc9sjQkZSUlMQdv9/vh8ViiTc0IiJDsFiAa64R5++7D9i1S994EoGJSoqz2Wxob29He3s7ZFmG1+tFIBBAcXGxqmSlo6MDixYtQn19PWRZRlVVVeKDVqmqqgoul0vvMIiIDO2CC4CjjgJ27gSWL9c7Gu0xURmppibgzDPFqQHY7XbU19ejra0Na1UcsWrjxo3Iy8uDw+EAIBKfZFN6dAYrLS2F0+lMejzRRIqViEgvVitw553i/L33mq9WhYnKSD3+OPDaa8Bf/qJ3JERElKZcLuCEE4DOTqCmRrvtdncDra3abW84mKgMx7ZtQHMz4PcDa9aIy556Svzf3Cyu10kgEIDL5UJeXh7KlNWAAJSXlyM3NxdFRUWoO1BxdfPNN+OSSy5BIBCAxWIZUA8S7vYA4HK5UFdXh7q6OhQVFcHn86m6j9vtHnC9cj+Xy4WSkpJgDBaLJThkNbjmxOPxoLi4OFiL4/F44n5+JElCSUkJLBYLiouLB8Qf6zGixapFbEREw5WRAdx9tzj/hz8An36qzXZvvx04/ngxq0gvTFSG44gjgJkzRan1jh3ish07xP8zZ4rrk0SSpOBOU9lJ2u12NDc3B2/jcrkQCATQ2toKr9eLyspK+P1+3HPPPVi5ciXsdjtkWUZtbW3U2yuPV1tbi5qaGtTU1ASHZmLdp7KyEi6XC62trXA4HMGkqL6+HvX19cEYZFmOOPzU1taGFStWBGN1uVzBx1DL5XKhra0NLS0t2LBhAxobG1U/RrRYtYiNiGgkzj4bOPVUMQPohhtGvr233hK9M3v2AGPHjnx7w8VEZThWrRKDggCgHMFVObVak5p62my24E5TSU6qqqqCO9BAIACPx4P6+nrYbDbY7XbU1NRgjdITNIia2wcCATQ3N6O0tFT1fRwOB5xOJ2w2G8rLyxEIBOJua1lZWbCWxul0wm63D+kRiSYQCMDn8wWTDZvNNqR4eLiPMdLYiIhGymIRxbSZmaKz/8knh7+t3buBiy8G+vvF6U9+olmYcbPq99ApbPFiYPp00YMyWEMDcGCHlWxKMlBZWRnsHVF+1U+bNm3AbWfOnBl2G2puryQc8dwn9HxeXp6q9oRTV1cXnNkUb7Lj9/uDiVQiHmMksRERaWHmTODWW8WQzRVXAN//vljBNl433QS0tACFhcD992seZlyYqIxURoZIOZVTndXU1KC4uBiVlZXBHbLD4RgwFKTojxBvpNsrwu3oY91Hi9lExcXFyMvLQ2VlJZxOJ4rDJYo6PUYyYiMiUuNXvwLWrQM2bQIuuQRYv17sotR65hng4YfF+cceA3SYDDoAE5Xhys8HCgpEunnZZcCjjwLbt4vLdRTaq1JfXw+HwwG/3w9JklQlC/Hefrj3iVcgEIDf74esDLENg91uhyRJCAQCYZOt4T6GFrEREWklK0tMRD3hBGDDBrHM/gMPqEtWXngBOP98cf6aa4C5cxMaqiqsURmuqVOBrVvFUE95uTjdunV4fWwaq6mpgcfjgd/vh91uR1lZWbDYFRAzVNxud9j7xnv74d5n8P0DgQAkSYLP5ws7bKIMFymziZT2xcPhcMDhcMDlcgUTliVLlsT1GOFi1SI2IiItHX00UFsr6lYefBD4xS+Avr7o93n+eaC0FOjpAc47T6zJYgRMVEYiO1u8CwBxmp2tbzwHhPaqAEBtbS0cDgeKi4uRm5uL2traqAupxXv74d4nNF6Hw4Fp06ahJsICADabDRUVFcEpzl6vd0itjBobNmxAXl4ecnNzUV5ejvLy8mDviprHCBerVrEREWnppz8VS31lZoohnPPPBz75ZOjtdu0Cfv1r
sRZLTw+waBHwxBMH54zozSKneH91Z2cnJk6ciI6ODuTk5Ay5fv/+/WhtbcW0adMwevRo1dvt7+9HZ2cncnJykBHP4F4KYRvNwextNHv7AGDv3r3YsmULjj76aEyYMEHvcBKip6cH69atw/z585GVlaV3OJozcvueflokKb29IvlwuYAFC8Ribl99JWYKffGFuO3ixcDKleGTFK3bGGv/rTBIvkRERESJcO65olblN78BXn9dTFsePHX5yCPFUM855xwcKDAKJipEREQmd9ppwMaNYgH1Bx8Uy+KPGQOMHg2cfjrwy18Co0bpHWV4TFSIiIjShMMhJqmmEnMO+BIREZEpMFEhIiIiw2KiQkRERIaVNolKis/CJiITU76fLEabbkFkAKZPVLKysmCxWLBnzx69QyEiCmvv3r3o6+uD1SgrbBEZiOk/FZmZmZg4cSJ27NiBrq4u5OTkwGq1xvzl0t/fj+7ubuzfv9+0i0yxjeZg9jaatX2yLKO3txednZ3o6OjA7t27kZmZqXdYRIZj+kQFAAoKCjBmzBh89dVX6OzsVHUfWZaxb98+jBkzxrTdsWyjOZi9jWZvX2ZmJvLz8/HRRx/pHQqRIaVFomKxWGCz2TBx4kT09fWht7c35n16enrwxhtv4LTTTjPccshaYRvNwextNHP7rFYrMjMzVX0nEaWrtEhUFBaLBVarVdU4sPLlMXr0aNN9OSrYRnMwexvN3j4iis48A75ERERkOkxUiIiIyLCYqBAREZFhMVEhIiIiw2KiQkRERIaV8rN+lKWn1a6PolZPTw/27t2Lzs5O0840YBvNwextNHv7ALbRDMzePkD7Nir77ViHuEn5RGXXrl0AgMLCQp0jISIionjt2rULEydOjHi9RU7xo/X19/fjs88+w4QJEzRdtbKzsxOFhYXYvn07cnJyNNuukbCN5mD2Npq9fQDbaAZmbx+gfRtlWcauXbtw6KGHRj08Rsr3qGRkZGDq1KkJ235OTo5p33QKttEczN5Gs7cPYBvNwOztA7RtY7SeFAWLaYmIiMiwmKgQERGRYTFRiSA7Oxu33XYbsrOz9Q4lYdhGczB7G83ePoBtNAOztw/Qr40pX0xLRERE5sUeFSIiIjIsJipERERkWExUiIiIyLBSfh2VRKirq4MkSbDZbGhpaUFVVRVsNpveYWnK7XYHz3/99deoqanRMZqRkyQJa9euRX19Pbxe75Dr3W538DWUJAkVFRVJjnBkYrUv1vWpQM1rCAAtLS0AgNra2qTGp4VobVSuA0QbA4EAVqxYkXLfPfG8F0tKSlLu/RqtfT6fD7W1tSgpKYHdbofX68WsWbNQWlqqU7TDo+Y1rKysRFFREQAgLy8voW1kojKI2+1GWVnZgJ3akiVLUF9fr29gGnK5XCgpKUFZWRkAkZhVVlambLLi9/vR1NQESZLQ1tY25HplB6e01+fzoby8PGV2dLHaF+v6VBCrDYPfn+Xl5Sm3k1PTxsrKStjtdgCijS6Xy1RtDOXxeODz+ZIUmTZitU+SJPh8Png8HtjtdlRWVqZckqKmjXPnzsWGDRtgs9ng9/tRXFwc83g9IyLTAE6nU9VlqaqlpUUGILe3twcva29vH3JZKqqvr5cdDseQy20225C2peJbP1L71F6fCsK1ob29XXY6nQNew+bmZhmA3NLSkuQIRy7S6+R0OuWamprg/zU1NbLNZktmaJqJ9V5sb2+Xa2trU/JzKMuR21dfX5/y36OKSG0sKysb8D6VZVn2er0JjYU1KoPYbDaUlJRAkiQAQCAQCP7CMYNAIAAAA7qTlfNNTU06RJRYgUAgOIw3WKr9mktnTU1NwfcugOBnUvmcmoHX6x0wJNnY2Ain06ljRImzdu1anHfeeXqHQcNQV1eH0tJSBAKB4Hdoot+nHPoZZMWKFSguLkZubi4qKipQVFSUMkMEaoR+wQ/eeYfuCMwiUptsNpupdnJmZrPZ0N7ePuAy5QvSTD8iQnk8HkiSZKohZ4XP5zNtAgaIJCwvLw9tbW1oaWlJ2SH1cJTvU7/fD7vdDrvdHhyiTORrykRlEJvNhsrKSni9XrjdbjidTpx33nkpV9AWid1uh9PphM/nC46dpmPPgvJFQqmpuroatbW1pvlcKpQiRkmS4HK5TNc+QLTRbreb8oeCw+EAcDCBrqurg8vlMk3CGdojr7S1pqYG06ZNG/JjQksc+hlEKWarr69HS0sL2traUFxcrHdYmvJ6vWhsbERdXR08Hg/y8vIAmPfXaThMUlJXZWUlFi1aFCyONhObzYaysrLgEFBubq6pdujKsIFZKb0MivPOOy/YO2YmM2fODJ5XeqcT+YOXiUoIpZ5B6cKy2+1obm6GzWaDx+PROTpt1dTUoKysDKWlpcEPVuibzywiJV/KrzpKLR6PB0VFRSk3vTwWSZJQWVk5YIfmdDoTvgNIJr/fb8rvmFCD9xNKj5hZhtUjfWfabLaEtpGJSohAIBC2q7W8vDz5wSSQ3+8f8L8yDGTGbma73R7xQ2TmcXIzUnbYSk+KJEmm2QEEAgG43e4BPX1K0mKWz2VbWxt8Ph/cbjfcbjcqKysBiOUDzPBDUBmuC31PKq+hWX4UKT1Ggz93kiQlNAllohLC6XTC7/cP6aZrbm42VXely+Ua8CuttrbWFAVfkYZzqqqqBrTX4/Gk5LBBrOEqMwxnRWqD3++H3++Hw+FAIBBAIBBAXV1dcNgylYRro8PhQEVFxYAd2po1a+BwOFIyoQ7XRqfTiYqKiuCf8gOwoqIi5b5fw7XPZrMNeQ2Voa5UTDYjfRZramqwZs2a4P8ejwdOpzNYs5IIPHryIJIkobq6GpMmTQqOvYUuAGcGPp8Pfr8/uPJueXl5Smf8gUAAHo8Ha9asgd/vR0VFxZDVIN1ud7CNjY2NKZWYxWqfmvYbXbQ2SJKEadOmhR3nT6Wvr1ivkyRJqKurC95emTGSSt89at+Lym08Hg8qKipQUlKSEglZvK9hKq76reY1VFZvB5LTRiYqREREZFgc+iEiIiLDYqJCREREhsVEhYiIiAyLiQoREREZFhMVIiIiMiwmKkRERGRYTFSIiIjIsJioEBERkWExUSEiIiLDYqJCREREhsVEhYiIiAyLiQoREREZ1v8HQl2oB9R1jzQAAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -1856,7 +5300,7 @@ "id": "fbeda78f", "metadata": {}, "source": [ - "In the result we can see, that the equation $1.42 t - 3.81 u -0.96 u'' + 0.007 = u' \\sin{(2.0 t)}$ can decently describe the process, while the alternative equation is not representative enough." + "In the result we can see, that the equation like $1.42 t - 3.81 u -0.96 u'' + 0.007 = u' \\sin{(2.0 t)}$ can decently describe the process, while the alternative equation is not representative enough." ] }, { @@ -1877,7 +5321,7 @@ }, { "cell_type": "code", - "execution_count": 98, + "execution_count": null, "id": "ad06186e", "metadata": {}, "outputs": [], @@ -1913,7 +5357,7 @@ }, { "cell_type": "code", - "execution_count": 126, + "execution_count": null, "id": "e0a8ad36", "metadata": {}, "outputs": [], @@ -1921,37625 +5365,169 @@ "def epde_discovery_as_ode(t, x, use_ann:bool = False):\n", " dimensionality = x.ndim - 1\n", " epde_search_obj = epde.EpdeSearch(use_solver = False, dimensionality = dimensionality, boundary = 50,\n", - " coordinate_tensors = [t,])\n", + " coordinate_tensors = [t,], verbose_params = {'show_iter_idx' : False})\n", " if use_ann:\n", " epde_search_obj.set_preprocessor(default_preprocessor_type='ANN',\n", " preprocessor_kwargs={'epochs_max' : 35000})\n", " else:\n", - " epde_search_obj.set_preprocessor(default_preprocessor_type='poly',\n", - " preprocessor_kwargs={'use_smoothing' : True, 'sigma' : 1, \n", - " 'polynomial_window' : 3, 'poly_order' : 3})\n", - " popsize = 12\n", - " epde_search_obj.set_moeadd_params(population_size = popsize, training_epochs=100)\n", - " \n", - " factors_max_number = {'factors_num' : [1, 2, 3], 'probas' : [0.4, 0.3, 0.3]}\n", - " \n", - " epde_search_obj.fit(data=[x,], variable_names=['u',], max_deriv_order=(2,),\n", - " equation_terms_max_number=6, data_fun_pow = 2,\n", - " equation_factors_max_number=factors_max_number,\n", - " eq_sparsity_interval=(1e-12, 1e1))\n", - " \n", - " epde_search_obj.equations(only_print = True, num = 1)\n", - " \n", - " return epde_search_obj" - ] - }, - { - "cell_type": "code", - "execution_count": 127, - "id": "f0f50bff", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAi8AAAGeCAYAAABcquEJAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAADc6UlEQVR4nOyddXhTZxuH79SpUMEdijsUGW4rOtxh2GC4zmBs35ijU3SFDYYPKO6juA13py1QtNA2dU++P96maSEFGnqapLz3deXKycmRp22a8zuPqrRarRaJRCKRSCQSC8HK1AZIJBKJRCKRZAYpXiQSiUQikVgUUrxIJBKJRCKxKKR4kUgkEolEYlFI8SKRSCQSicSikOJFIpFIJBKJRSHFi0QikUgkEotCiheJRCKRSCQWhY2pDchqNBoNDx8+xMXFBZVKZWpzJBKJRCKRvAZarZbIyEgKFy6MldXLfSs5Trw8fPiQYsWKmdoMiUQikUgkRhAUFETRokVfuk2OEy8uLi6A+OFz585tYmskEolEIpG8DhERERQrViz1Ov4ycpx40YWKcufOLcWLRCKRSCQWxuukfMiEXYlEIpFIJBaFFC8SiUQikUgsCileJBKJRCKRWBRSvEgkEolEIrEopHiRSCQSiURiUUjxIpFIJBKJxKKQ4kUikUgkEolFIcWLRCKRSCQSi0KKF4lEIpFIJBaFFC8SiUQikUgsCileJBKJRCKRWBRSvEgkEolEIrEosmUw48yZMwHw9/cHwMfH57X2cXNzA0CtVjNx4kTF7JNIJJI3QqOB2CiIjYSYCIiOEM9x0eDiDm4FwL0AOLvBawydk0gkL0dx8TJp0iRmzJiR+nr48OG0bNmSPXv2ZLiPTuwMGzYMAD8/P4YPH/5aokcikUhem8QEITh0YiPtQ7dOJ0jSrnvhEfl657O1A9f8QsjoHm5pl/PrX+fOA1bSOS6RGEKl1Wq1Sh1crVbTo0cP1q1bl+pFOXv2LLVq1cLf3x9PT0+D+7m7uxMYGJi6D4gR2a9jakREBK6uroSHh5M7d+6s+DEkEoklEBsFd6/C3SsQ+ujlYiM6RZQkxJna6oyxsga3fM+JGwPLbvnFdtbZ4kiXSBQjM9dvxT/tp0+fJiAgAC8vL4BUwaJWqw1uHxAQgFqtTidcdPj5+eHt7a2UqRKJxBJIiIOg6xB4Ge5ehjtX4M5leHLH1JZlLZpkCH0sHq9CpRKemoyEjnsBKFYRCpVS3m6JJBtQVLy4ubkRFhaWbp2fnx9Ahl6XgICADI9lSPDEx8cTHx+f+joiIsJIayUSiVmRlAj3bwpPyp3L4nH3Cjy8LXJMTIGDEzi5gmNu8fz8ctrXNnYQEQLqYPEIf6pfVgcLD1BWodVC+DPxuHsl4+0KeUJNb/BqCTVaQG6PrLNBIslGst3POG3aNHx8fAx6Vl6Gh4cHoaGhBo/37bffZpF1Eokk20lOhscBKZ6UNELlwU0hYLICK+uXi47XWnbJ2tBMQvyLgsaQyNE9siLE9SgAHi2EHQuFt6ZsLb2YqdwA7Bze/BwSSTaQreJl0qRJ9OrVKzURNzMYEi4AkydP5uOPP059HRERQbFixYy2USKRKIRGA8H39AJF93zv2ptdmJ1coVRVKFlFPAqXASe39ALEwdH8qnzs7CFfUfF4FVqtqFxKJ2heIXw0ya8+5s3T4rFmOtjngiqNhZip1RJKVZMJwxKzJdvEi6+vL6VLl36lcMkonKRWqw2+Z29vj729fZbYKJFIsgCtViTMpvWk3L0iHrFRxh/XzgGKV0ovVEpWgbxFsk+YJCdDdDhEqyFKLZaj1Gleq8HWHnLnBde8Ig9Ft+ziATa2xp1XpYJczuJRyPB3ZDo0GmGPOlh4tc7vh/N7wf+8+PsYIj4WzvwrHn8Crvmg5rt6MZO/uHG2SyQKoGi1kQ4/Pz/UajXdu3cHhBAJDQ19abXRmTNn0r0vq40kEjMlLgYuH4FzfnDtuBApkWGv3i8jrKyhaLkUcZIiVEpVgYKeYG39ZrY+Lz7SCo/nRYihda9bEp0RTq4pouY5YaN7/fzymwgeQ0SEwIUDcG6vEDP3b77+vkXL6UNM1ZuJnjUSSRaSmeu34uLl7Nmz+Pn5pQoXEF6YYcOG4ebmRkBAAL6+vuma0Oka1Om8NL6+vuzZs+e1+rxI8SKRKExyMtw6I8TKWT+4elT0SzGGAiX1HhSdR6VoeRFSeV3CguHOJVEmHf5UWfGhw8FJXLyd3MSzbtkxNyTGQ0RK8mxEiFiODM3Y4/EqnN2e8+CkETnPe3dy5xGP183NCQ6C8/uEkDm3F0Ievt5+VlZQvq5ezFSsJ3rYSCRvgNmIF7VaTalSpQxWCelOu3DhQmbMmJHafVfHzJkzUz0vp06dStfo7mVI8SKRZDFaLTy4JYTKOT+4sF+IgczgXiB9qKdkFShRWSTBvi5xMXDvKgReSvO4KEIjmSUj8ZHRunTvu2beG5KcDFFhejGTVtiEPxPPESHpl99E8BQuDdWaQbWm4jn/a+QBarUQdEMvZDLzd3ZwEufSiZmSlc0vx0hi9piNeDEFUrxIJFlA6GNxR67zrjwNer39HF3Sh3t0D7d8r3/u1OqjNCLlzqWMS6RVKpEHUrKqyH9RQnyYAp3gCX8GkSH6UuiI54VPmuWoMMOCp5CnXshUawoFSrze+f3P68XM5cMiL+Z18CioFzI13xV/F4nkFUjxIsWLRJI5YiLh0iG9d+XO5dfbL5ezuCB6eYuLVYlKmbvjVj8V3pO0IuXOFYiPMby9a14hUjyriedSVYUHJ5fT658zJ5OcLMTMrdMit+XiQRHie77yqEBJIWKqN4OqTaFgyVf/3RLi4fp/Qsic2wvXT7y6oklHiUri8/FOe/FZkV4ZiQGkeJHiRSJ5OUmJcOOkXqxc+w+Sk169n5U1VHhHXIC8Woq8h9fJdYiPFTkpzwuVsCeGt09bWVSqWspzVRF+khe+zBETCVeOwsUDQtDcPP2i6MhfXO+Zqd4MCpZ69e9ZJ3h1yb8BF1/PHs/q0Pd/0KirLMWWpEOKFyleJJL0aLVCPOjCQBcPvH7ZctHyolS2pre4sDm5ZrytRiMaoenyUTIT8knrTSlc5s0riySGiYmEq8eEV+biAbhx6kXhmreo+FvrBE3h0q8WM2HB4ng6MfPQ/+Xbl6gEfb6Epj3lXCYJIMWLFC8SCcDT+yku/hTvyuvMyAEx6K+mtz4U9LJkz/hYuHQYzu4Rd+F3Lr865JPWmyJDPqYnNvo5MXPyxc7GeQrr82WqN4MiZV8tZp7c1YeYTmzNuNKrcBno8wW8288ycpEkiiHFixQvkreR6AhRIaLzrgRdf7397HOJvAddKKhklYzd+VqtCA+c+VcvWBLj02+TLuSTRqjIkI9lEBcj+vXocmZunHixFN6jYPpqpmLlX/63jVLDlnmw4VeRk2OIAiWg1+fQ6oPMlcpLcgxSvEjxInlbiI+FE9tg/2o4ueNFIWEIKysoW1svVirWf/nFIuSRECq6x/N5KnmLQK1WwktTtpYM+eQ04mNFTtTFA0LMXP
vvxc+ZewEhZKqmeGaKVzQsZmKjYeci8P0Jnj0wfL48haHnRGg7VIx1kLw1SPEixYskJ5OUKETE/tVwbNPr5a4ULqMXK9Wbg4t7xtvGxYiyWJ13JfBS+vftHcUFqlYrcbyMLlSSnElCnKg00nlmrh57UcwUKQvdPoGWA4Rn74VjxMPe5WKmUka5MW75ofsn0H5k5voBSSwWKV6keJHkNDQaISj2r4bDvhm73nXkziP6a3ilJNoWLPnyYwde1M+1uXwk/cVIN33Yq6UQLK/y1EjeLhLi4PpJfc7MteP6fjCu+aDTWOg4Snwmnyc5CQ6tg9VTMy7Pd/GArh9BpzFyJEEOR4oXKV4kOQHd1N/9q+Hgmle3bi9fBxp2FQKjdI2Xl6GGPIQze/TelfCn6d/PW1Qcp1YrIYJc877xjyN5S4iNgl1/wfpfxBRxEN66NoOh68dQqNSL+2g0cGI7rP5ReHUM4ZgbOo+DrhMMCyGJxSPFixQvEkvm7lUhWA78I0qMX0bR8tDifWjeW7jqMyIuRiTX6rwrd6+kf9/BSYSTdN6VVyVgSiSvIilReFXWzRKdekEI6sbdocdnUK72i/totWIC9j9TRZWSIRycoMMoEVJyL6CY+ZLsR4oXKV4klsbjO0KsHFj96mZfeYtC8z7iUbqGYZGh0YgLxtkU78qVI+krRlQqcfHwaiV6uFSsLwfrIWauxcYmEh+fSHx8UupzXJzutX5dcrIGa2srrKxU2NhYpSyLZxsbsd7a2gp7e1ty53Ygd+5cuLg4YG39ljVm02qFEFk3S3wWdVRvLkRMnTaGP8PX/oN/psHxLYaPa+cA7YaJ5F45fiBHIMWLFC8SSyDsCRxcKwTL1eMv39bFQzTzatYHqjQyHBKKDBNf9Gd2p4SCnqV/P39xfSioRou3wvUeH5/E48dqHj5U8/RpJGp1DGFhMYSFRaNWxxAaGk1YWEzK+mjCw2NJTjbQTC8LcXKyJ3duB1xchKDRiRoPDycKFXKjcGE3ChVypVAhNwoWdMXBIQf1PvG/ICqNDvyjb4xXsgp0/1SIcUMCOvAS/DMdDv5juNGhrZ0or+71+ctzuyRmjxQvUrxIzJUoNRzZIMJCF/YZ/jLW4eAE9TtBi74inGPoiz0hTpRI710BJ7en967kchZ3tzrB8jqNxSwIrVbL48fh3LsXysOHah49EiJFPMJ4+FDNs2ev2UXYAFZWKhwcbLG3t8XBwQZ7e1vs7W1SHrZYWalITtag0WhJTtakLiclJZOcrEWjEevi4hKJiIgjLi7x1Sc1QFpRU7iwG0WLulO2bAHKlStIsWIelunJCQ6Cjb/BjoX6arm8RaDzeHhvmOEuzg9uw9qZsOfvF5vogejS690fek9+eQhVYrZI8SLFi8SciI3W92I5vfPFhl9psbGF2m2geV+o18Fw91mNRuSv7Fspcgqiw/XvlawCDTqnVAXVyxEdSyMiYgkIeEpAwFNu3w5OWRbPMTEv+V2mYG9vQ+HCbuTL54K7u1PKwxE3N0c8PMRrNzdH3N0dcXd3wtnZHgcHW2xssrZXTUJCEpGRcURExBIZGUd4eGzq64iIWEJConn0SJ3yCOfhQ/UrBY+Dgy2envkoV65giqApQNmyBShZMi92dhbQcj9KDdt9YOPvEPpIrHPMLcJBXcZDvqIv7vP0vkgG3u5juJuzlRU07S269pasrKj5kqxFihcpXiSmJjEBTu8WIaHjWyAuOuNtVSrR4Kt5X2jUDXJ7GN4u8JLwsOxbBc/u69fnLSq8My3eF/OBLJTo6HiuXXvE1asPuHr1ITduPMbfP/il3hNrayuKFHGjcGH31HCLbln38PBwQmWBHietVotaHcOjR+Gpoubhw3Du3HnGzZuPCQh4mqG4sbGxoly5gnh5lcDLqwS1apWkdOl8WJnrIMSEeNi/SuTF3Lsm1tnYiv+J7p9CqSov7qN+Cptmw+Y56QV8Whp1g75fQpmaytkuyTKkeJHiRWIq/C/A1vlweJ3IQXkZZWuJL+dmvTJOOAwOEl/q+1ambxbn5ApNegjBUrWJRU3n1Wq1PHyo5soVIVJ0j8DAZ2T0dVSgQG48PfOlPPJTurR4Ll7cwzI8DAqQnKwhKCiUW7eecPPmY27depKy/ITo6Bc7LefO7UDNmnoxU7NmcdzdzWyulEYjwqDrZgnvoo46bUVyb/VmL4Y+oyPE/9z6X14s+dfxTnsY+J0UMWaOFC9SvEiyk6RE0el20xzRSO5lFC0nBEvzPmLZEFFq0Yhu7wrxBa77F7W1g7rviQF2dduJagsLQK2O4dy5u5w5c5dz5+5y/vw9wsIMD28sUCA3lSoVplKlwlSsWJgyZfJTqlQ+XFws42c1B3Ti8OLFIM6cucvZs+J3bshL4+mZj4YNy9CsWUUaNSprXr/n6yeFiDm6QZ8bVraWEDGNu704iTouBnYvFnkxT4NePJ61DQydJcJRFuiJexuQ4kWKF0l2oH4qEg63Lch4TguIWS3N+4hKobJehr84E+JFwu2+lSI/Jm1eTLWmQrA06vbytv5mQFJSMtevP+bs2TucPSsEi79/8Avb2dhYUbZsgRShUoRKlQpTuXJh8uaVbeCVIDExmevXH3H27F3Onr3DmTN3CQhI76WwsbGiVq2SNG9egWbNKlClShHzCDM99Bdeld2LRYI6QMFS0O1jUWX0fF5YYoLwVv4zDe7ffPF4DbvAJ4tlt14zRIoXKV4kSnLztPCyHPwn4+RbF3do1F3kolRpbHhQoa7l/94VwtMSpda/V6pqSvO5PqLE2UyJj0/i3Lm7HDt2m+PHb3P27D1iY1/8nZQqlTcl/6IkXl7FqVChMPb2b2e4x1wIDY3m9OlADh68wcGDN14QM3nyONO0aXmaNStPs2YVTC8s1U/FZOotc/XjMVw8oONo6DgG3POn3z45GfyWwbyxL+acFfKE/60TNxMSs0GKFyleJFlNYoIQGJvniOZZGVHGS7ilm/bMOKwTeEl4WPatSu/etoDE24SEJM6du8fx47c5duw2p0/feSEc4exsT82aJahVS+RX1KxZgjx5nE1kseR1uXv3GQcO3GD//uscPXorXd6MlZWKBg3K0KlTTdq2rYaHhwlzZeJi4N+/Yf3P8ChArLNzgJYDoc+XkL9Y+u2DbsCPPV9s/mhrByN+g/YjZBjJTJDiRYoXSVYR+liUZG7/QywbwspaxOA7j4NKDQx/EQYHicqjvSssKvFWo9Fw8eJ9Dh68wbFjtzl1KvAFsZI3rzP165ehQYMyvPOOJ2XLFrDM3iOSVBISkjhz5g7791/nwIHrXL6sD4va2FjRpEl5OnWqSevWVcid28DU6OwgORmObhR5MTdOinXObjDeR9w8pCU+Fnw+hm1/vHicZr1hwkI5udoMkOJFihfJm6DViuFwm+aIqiFDDbFAdKh9bzi0H2m4H4Uu8XbfSjFx10ISb0NCojhw4HrqhSs0NL3LPU8eZxo0KE39+mVo2LAsZcrkt8hSZMnrc+9eCFu2nGfz5nNcuaIXMvb2NjRvXpFOnWrSsmUlHB1NMG1cq4VLh
+HPifqhjq0GwajZLwqSg2vh1w8hJjL9+qLl4CtfEa6VmAwpXqR4kRhDQryY3rx5jshryQjP6iI01Kw32Bu467x7FTb8BnuX6xMMQSTetnhfDKYzo8TbpKRkzp27x/7919i//zoXL95PV7Ls4uJAo0ZladSoHA0alKZcuYJSrLzF3L4dzJYt59i06Sy3b+uTsXPlsqNjxxoMGtSQ6tVNkKeVlAgrvxeTqTUaKFwaPl8FFeqm3+6hP/zYC26dSb/ezgHGzIPWH8gwkomQ4kWKF0lmeHpfuJN3LMy4T4SVFTTsKkJDVRq9+OWm1Yqhcxt+Fc3pdJSsIjwsZpZ4GxUVx75919i16zIHDlxHrU5fuly5chFatKhI8+YVqFWrJLa2WdttVmL5aLVarl17xObN59iy5Rx374akvle9ejEGDGhIp041cXTM5oGflw7DjH4QfE+EdAd8K+YepU2aT4gXnppNs1/cv+VAIWIMdbeWKIoUL1K8SF6FVguXjwgvy5ENoEk2vJ2LB7QbCh1GGRYf8bHgt1zMadF1BrWyEi36u34ElRuazV3c06eR/PvvZXbtusThwzdJSND/zG5ujjRpUj6lTLY8BQoYmC0jkWSAVqvl1KlAli07xrZt51M/W66uuejZsw79+zekTJn8rzhKFhKlhtkjxQBIEPlkk1a8mMx7ZCP8Mjh9pR9Aicrw1TooXjE7rJWkIMWLFC+SjIiPFVU+m+dAwIWMtytVVXhZmvcFB8cX3w95JMo2t/+hL9t0dIHWQ8R+hUopY38mCQx8ys6dl9i16xJnztxNFw7y9MxHmzZVad26Cl5eJWSSrSRLCAmJ4p9/TrB8+THu3QtNXd+oUVkGDGhA69ZVs8eTp9WKG4u5o8XwR2c3kZjbpEf67R7fEWEkXdKvDgcnkfz77vvK2yoBpHiR4kXyIk/uihbiO/+EyFDD21hZiSnOnceJ/BRDHpPb50Ro6MA/+kTeAiXFPm2GgJPpP3N37z5LTa68evVhuvdq1ChOmzZVaNOmKmXLFpC5KxLF0Gg0HDhwg6VLj7J371U0GnGpKVbMgzFj3qVnz7rZ0+vnoT9M66sXJ60/EMm8udKU7ycmwJIvwPfnF/dvNwxG/mY4v02SpUjxIsWLRIf/BZHEd2yjvsX48zi7QduU0FDBki++n5wsut5u+FVUDemo3FCEhhp0NtyELht59EjN1q1CsJw7dy91vY2NFfXrl6FNm6q0alWZIkXMJ1FY8vZw/34oK1YcZ+XK/wgJEYM2CxVyZeTI5vTtW1/5vJikRFj+LfwzVXhkCpeByaugfJ302/23DWYNfPEGx7O6CCMVKausnW85UrxI8SK5exWWfwOH1mW8TYnKwmPS4n3DyXmxUbB7CWz6Xdy9gZiP0qQndJ3w4hdfNhMSEsW2bRfYvPkcJ04EpIaErKxUNGxYNqWhWFXzG74neWuJiUlg1ar/WLBgH48eiUnQefI4M3x4MwYNaoizs8JtAy4eEsm8T4PE//KA76DnxPQ3H8FBMK0PXDmafl9HF/j4rxfDTpIsw6zEi1qtZu3ataxbt449e/a8cns/Pz98fHxo2bIlnp6e7Nmzhzp16tC9e/fXOp8UL285D26JO6z9q/R9VdKiUkG9DkK01GhhODT05K7Iidn5J0SLL1hc3KHdcNGK3FBPl2wiLi6RPXuusHbtSQ4cuEFyst6bVKdOKTp39qJ9++rkyycbbknMl/j4JNatO8mcOXsJChJeDjc3R4YMacyQIU1wczOQZ5ZVRIbB7BGi5wuIEPHE5emTeZOTYOkUMR/peTqNFQMe7UzQ0yaHYzbi5ezZs5w+fRq1Ws2aNWs4c+bMK/fx9fVl6NChqNVqPD09mTRpEsOGDXvtc0rx8pbyKBBWfQ97lhmuHHJ0EaGhjqPFXBNDXD0uQkNpq4+KloMuE8B7gMlKJ7VaLRcuBLFmzUk2bz6Xrqy5WrWidOrkRYcONShaVIaEJJZFYmIymzadZfZsv9QBns7O9owY0ZwRI5orF07SasWIAd3cIxd3GL8Qmjx3k3x6N8zo/2ILhfJ14Is1ZpOYn1MwG/Giw9fXl2nTpr22ePH29sbNzc2oc0nx8pYRHCSaUu36S9wtPY+9o/Cy9PgMcnu8+H5yEhxeL0SLrjsnQM13RT5LnbYma9n/5Ek469efYe3ak9y8+SR1faFCbvToUZvu3etkb/mpRKIQyckatm+/wO+/7+HatUeAyImZPPk9unatpdx06we3YPr7cOOUeN16MIz6PX0yb8hDsc2FA+n3dXaDz5ZC/Y7K2PYWIsWLFC85n5BHwqW7w8fwZGdbO9G2v9fn4FHwxfej1LBjkQgP6YYj2tqJ/JcuE0w2GDEpKZk9e66yatVx9u+/nlqh4eBgS9u2VenZsy6NGpWVZc2SHIlGo2HLlvNMnbqN+/fDAOFd/OabztSrV1qZkyYlwrKvYc104ZEpUlYk85arrd8mOVkk/q/87sVwdPdPYfBUsLFVxr63CIsXL6GhoXh4eBAaGoq/vz8zZszIcPv4+Hji4/XTTyMiIihWrJgULzkV9VNYO0P0WEnbel+HlbW4e3r/qxcbUoGYQrvhV5GIG5cys8c1n6g06jAS3Asoa38GPHyoZtWq/1i9+r/UREaA2rVL0qtXXTp0qGG6AXgSSTYTF5fIn38eYvbsPURFie/3du2q8b//daBkybzKnPTCAREienZfJPMO/F54bNMm857bBzPef3FIa6UG8MU/hr9zJK+NRYuXgAAx4tzTU+QlLFy4kD179rBuneGqkW+++YZvv/32hfVSvOQwIkLB9yfRzjsu+sX3VSrhNen3NRQp8+L7YU/EndN2H314qVRV4WVp0dckwxGTkzUcPHiDZcuO4ed3JdXLkiePM7161aVPn3coXVqGhSRvL8+eRfLTT7tYseI4Go0WW1trBg9uzIQJrXB1VUDMR4TC78PFQFWA6s1EMm/aJP2wJ0LknH2uACV3HtHFt06brLfrLcGixcvzqNVq3N3dCQsLMxhKkp6XHE50OKz/VXhLYiIMb9Oom5hfUrKygf0jhOhZ/4te9NRqJe6oar5rktb9T59G8s8/J1ix4nhqpQVA/fql6d+/AW3bVsue5l0SiYVw/fojvvtuCwcOXAfAw8OJ77/vSufONbO+0aJWKzyz88fpk3knLILG3fTbaDQibL1syov9o/p8CQO+Ed4bSaawaPHi6+v7Qlm0SqXizJkzeHl5vXJ/mfOSQ4iNEl4W359EaaMh6rQVrt1ytV58LyEeti0Qybzhz8S68nXhwxnibsoEXL78gD//PMimTWfTzX7p0aMO/fs3oGxZ04SsJBJLYf/+a3z77ebUBHZv70pMm9ZdmeaLD26Jzry6CfNtP4QRv6WvOrx0WPSEefYg/b5eLeHbzbIrbyaxWPGi87L4+/unho1e5Xl5HileLJy4GNHGf+0Mveh4nurNYNAPosPt8yQnw76V4o7oyV2xrmh5kVDXsEu2e1qSkzX8++8V/vzzIMeP+6eur1mzOAMHNqR9+xrZP3VXIrFgEhKSmDdvH7///i8J
Cck4O9vzv/91oF+/+llflZSYIJJ5184QHpmi5eDzlemTecOfwcwBcGpn+n0bdIavfE3efduSyMz1O1tKFkJDDc+SCQgIYObMmamv3dzcmDhxYqpwAZHz0r17d6OrjyQWQkIcbJwNg0rDos8MC5fydWH6Hpi570XhotWK1t4ja4j23k/uQt4i8NEiWHQZGnXNVuESGRnHokUHadjwR4YMWczx4/5YW1vRqVNNtm4dz/btH9GzZ10pXCSSTGJnZ8NHH7Xi338/pVatkkRFxfP557507z4/tVdMlmFrB0OmwYy94vvk/k0YXx/W/aSvOnLNC99vE/lzaTm2CeaMMtwsU/LGKOp5CQgIwNfXlzVr1nD27FkmTpyYrlvuwoULmTFjBv7++jtStVrNwoULU1+HhIS8tNroeaTnxcJITIDdi2HVjyLL3xCe1WDgD1CvvWEBcuUY/DUJLh8Rr53doPdk0Qkzm9229++HsWjRAVavPpFaJeHm5ki/fvUZOLChnC0kkWQhyckaliw5wvTp24mJScDe3oaPP27NiBHNs35ydUQI/DZMNLEEURww4Bv9+xoN/DwY9ixNv1+/KSInT/JKzC5slJ1I8WIhaLWiOdyiz+DJHcPbFC0PA7+Dxt0NN4q7c0VMgj2+Rby2cxAN6Xp9LpLsspFbt54wb94+Nmw4TVKSSOArUyY/Q4c2pVu32tLDIpEoSFBQKBMnruXgwRsAVKlShLlz+1GunIEeT2+CViuS/xd+Kl5/OEPMRtKRnATfddN/J+kYMw86jspaW3IgUrxI8WLePAqEuaNfjBHrKFAS+n8N7/YznLEffE/Eof2WibsdXW+XflOyfe7QuXN3mTt3L7t2XU4djNiwYRlGjWpB06bllesMKpFI0qHValm37hTffLMZtTqGXLns+OGHrvTuXTfrK5JWTxM3TgCj50CnMfr34mPhizZw6ZB+nUoFX659cfyAJB1SvEjxYp4kJoi7lpXfiX/w58lTGPr+D9oMEbHm54kIgdVTRYO6xJTy+EbdRPJu8QrK2p4GrVbLoUM3mTvXj6NHb6eub9u2KqNHv4uXV4lss0Xy+iQmJhMaGkVISDRRUXHExCQQHR2f+iweCcTGJqDRaNBqQaPRotVqU5+1Wi0qlQoHB9vUR65cdinP4rWzswN58jjh4eFMnjxOODrKAX7ZSXBwBGPHruTw4ZsAdOnixYwZPbJ+YvXfX8GqH8Tyx39Bm8H696LD4dNm4H9ev87WDn7cBTWaZ60dOQgpXqR4MT8uH4HfR8DdKy++5+IBfb4QXW4N5ajERsPG32DtTH2vl+rNYPB0qPiOklanQ6vVsnfvVX7+eTcXLoiRAjY2VnTtWpvRo1vIUmcTkZSUzKNH4QQFhXL/fij374fx9Gkkz55F8uxZFCEh4hEWFvPqgymAg4MtHh5O5MnjjIeHEwULulKsmAfFi3tQrJgHxYrloUCB3HLkQxai0WiYO3cvs2btIjlZQ6lSeVmwYADVqmVhB1ytFnw+ET2oVCr4fBU0761/P+wJfNQIHupvcHB0gZ8OQpmaWWdHDkKKFylezIeIEPhzkhicaAjv/jDsZ3DL9+J7SYmw80/hqdG14y5dAwZPg9qts616SKvVsm/fNX7+eTfnz98DIFcuO/r1q8ewYc1kEm42EBeXSGDgU27desKtW0+4ezeE+/fDCAoK5fHjcJKTNa8+CGBlpcLd3YncuR1wdLTHyckOJyd7nJzsU187ONhibW2FSqXCykqFSqVCpSJ1WaPREheX+MIjNjaBuLhEIiLiCA2NJjQ0ivh4A8NCDWBra03Rou4ULepB2bIFqFixEOXLF6JChYJZ7zF4izh5MoBRo5bz8KEaOztr/ve/jgwZ0jjrwkhaLcweKTp3W1mL0uiGnfXvPwqEjxpC6CP9OvcC8NuxjKfbv8VI8SLFi+nRamHPMlj0qeGy58KlYdwf4OX94nsaDRxaB3//T3/XUshThIea9sq2Kc9arZYDB67z88+7OXtW9IzJlcuODz5oxMiRzcmTx/kVR5Bklvj4JG7ceMTVqw9Thcrt28HcuxeSOj7BEHZ21hQpIi7+RYu6U6CAK3nzOqc8XMib15k8eZxxd3fMtjwkrVZLTEwCISFRhIZGp3qAHj0K5969EIKCQgkKCuXBg7DUJG9DFCvmQYUKhahYsRCVKhXGy6skRYq4ZX0eRw4lLCyajz/+h927LwPQunUVfvmlN+7uTq/Y8zXRaOCnQeC3XISGvt0ibq50BF6GTxqLYbA6CpeGX4+abJaauSLFixQvpuXedZg9Ai4efPE9axuRnd/3f4ZDRBcOiEz+WykNDd3yiyGL7YYZzoNRAK1Wy8GDN/j5592cOXMHEK7/QYMaMWpUc/LmdckWO3I6sbEJXLv2iEuX7qc+rl9/RGJissHtXVwcKFu2AGXKFMDTMy/FiuWhaFF3ihXzIH9+F4tNjk5O1vDoUTj374dy924IN2485tq1h1y//ognTwyPxChUyJXatUtRp05JatcuReXKRbK+NDgHodVqWbz4MN9/v4WEhGSKFHFnyZIhVKlSJGtOkJwEU/uImUh2DjB1F1Rrqn//6nGY5A3xaUKXZbxg1n5wktcpHVK8SPFiGuJjRULt2hki5PM8lerDeB8xEPF5osOFaNn5p3idy1nMH+r2sVjOJs6evcuPP25N7Ybr4GDLgAENGDWqBfnzy8+TsWi1Wu7dC+HUqUBOnQrk9Ok73Lz5xGC4x9U1F5UrF6FcuYKULVsg5ZGf/Plzv3XehtDQaK5ff5QqZi5dus+VKw9f+L05ONhSs2Zx3nmnNM2bV6BmzeLY2Egx8zyXLt1n5MhlBAQ8JVcuO+bO7Ufbtga+j4whMQG+6wontovvrOl7oGI9/fundsGUDvrBsCDmq32/HexkUjdI8SLFiyk4swfmjISH/i++55gbhkyH94YbDvmc2C4muermg7QfKZo6GcqDUQh//2CmT9/O9u0XAbC3t6F//waMHt2CAgVcs82OnEJiYjKXL99PFSunTgUSHBz5wnZ58jhTrVpRqlbVP4oV83jrREpmiImJ5/z5IE6fFiLw9Ok7qNXpk5Hd3Bxp3LgczZtXoFmzChQsKD/DOsLDYxk+/G8OHbqJSqVi8uT3GD26RdZ85hLi4Kv2cG6vaJY5a7/I09OxfzVMfz99192mPWHy6mwLh5szUrxI8ZJ9hD4Gn4/FP6UhGneHUb+LMujniQiBBRNg7wrxunAZ+GQxVG2smLnP8+RJOL/++i8rV/5HcrIGlUpFz551+PTTNjIRNxNotVpu3XrCoUM3OXz4JseP307tMKzD1taaqlWLUqdOKerUKUXNmsUpWNBVCpU3RKPR4O//lJMnAzl8+CaHDt14QcxUqlSYZs0q0L59dapXL/bW/86TkpKZMmUTf/8tunL36FGbmTN7Zc0099ho+LKNqLB0zSuqi0pU0r+/ZR7MHZN+n05jYNRsk0y5NyekeJHiRXk0GpFhv3iyCPk8T75iMHa+aOlviMPrxdwPdbC44+j2ifC2ZFM7/8jIOBYs2I+PzwFiYxM
AMaH2iy/aU6FCoWyxwdJ5/Dicw4dvpj6ez89wc3Okdu2SqWKlevVi5MolOw0rTVJSMufPB7F//zUOHLjO+fNBpP2aL1EiDx061KBjx5pUrlz4rRYyS5YcYcqUjSQna6hTpxSLFw/OmkT86AiY9K6YSO1RCH4+BEXK6N9f/i0s/yb9Ph/8KFpGvMVI8SLFi7L4nxc9W66fePE9KyvoPF609TeUqxL2BOaMhiPrxesSlYW3pUJdRU3WkZys4Z9/TjJjxnaePYsCwMurBF9+2Z769cu8Yu+3G41Gw6VL99mz5yp79lzh0qX0s6gcHGypW7cUjRuXo3HjclSpUsRik2hzEiEhURw8eIPduy/j53c1VawDeHrmo2PHGnTqVJPy5d9O0X7w4A2GD/+biIg4ihXzYOnSD7PmBiYiFD5rBoGXIH9xIWAKpDSw1Gph/jjYPDf9Ph8tgrYfvvm5LRQpXqR4UYbYKNGWf+PvoDFQEVLGCyYshHK1XnxPq4W9K2HBeIgMFVVHvSdDny+zLVnt5MkAvvpqY+pF19MzH5Mnv0e7dtXe6rvPlxETk8Dhwzfx87uCn9/VdN4VlUpF9erFaNJEiJVatUri4GBrQmslryImJh4/v6ts2XKeffuuERenT6yvWrUo/frVp0sXr7eut8ytW08YOPBP7tx5hpOTPYsWDaJZsyzo2h32BD5pCvdviLD4z4cgT4ow0mhgRr/0IXcrK/h6I9Tv+ObntkCkeJHiJes5ugnmjTU8+dneUfRg6TzW8Cyip/dF6fSJ7eJ1mZrC25I2kU1BHj5U88MPW9m06SwgSm4/+aQ1H3zQWJaXGiA6Wlzgtm07z9696S9wTk72NG1anpYtK/PuuxVl2bgFExUVx7//XmHLlvMcOHCNhARxQ+LkZE+XLl7061c/azvSmjmhodEMG/Y3x47dxs7OmgULBtC2bbU3P/DT+/BJE3gcKHJfZh3QFyMkJsA3ndPPebNzEJVKVRq9+bktDClepHjJOiJC4JcP4dgmw++/856YmKpzh6ZFqxWlzws/FW39be3EGPken4GN8nfosbEJ+PgcYM6cvcTGJqBSqejb9x0mTWonL7rPobsj37btAn5+V9MJlqJF3WnZsjItW1amfv0yWZPUKDErQkOjWbfuFCtWHMffPzh1fbVqRenfvwGdO3vh5JTzy3kTEpIYO3YFW7dewNrail9/7U337nXe/MCPAoWAeXZf3LTN3Efq5Pu4GPi8JVw9pt/e2Q1+Pgylqrz5uS0IKV6keMkabpyC77uLKc7P41FQZMc37m44Q/5RIPw2VJQMguh38PFf6bPuFWTv3qt88cV6goJCAahbtxTffdflrbqTfBWJicns23eNDRtOs2dPesFSsmRe2revTvv21alatagMq70laLVa/vvPn+XLj7Njx4VUb4ybmyODBjViyJDGOb6zdHKyhk8/XcOaNScBmDq1G4MGZYEXJOiGEDDqYKjwjvCuOKbcREWGiffuXNZvn6ewGCNg6MYwhyLFixQvb4ZWC9sXwoJxwq35PO1HiPlCzm4vvqfRiFLAvz4X3STtc8GgH6HzOLBWPkTz+HE4X3+9ka1bLwBQqJAbX33VgU6dasoLMOLidOnSfdatO8WmTecICYlKfa9EiTy0b1+dDh1qSMEiISQkinXrTrF8+TECA8WIDwcHW/r0eYcRI5pTrJiHiS1UDo1Gw9dfb+Kvvw4DMHnye4wda2CUSWYJvCSmTUeGig68P+wAB0fxXshDmNAQntzRb1+0PPyaUnL9FiDFixQvxhMXIwaN+S178b1iFYT3pHIDw/vevwk/D4YrR8Xrak3hoz/TlwgqRHKyhmXLjjF9+nYiI+OwtrZi6NCmfPppaxwdc767+1U8fhzOhg1nWLfuFDduPE5dny+fC1271qJLFy8pWCQGSU7WsGvXJebO3Zs6Td3a2opOnWoyalQLKlUy0MMpB6DVapk1aye//bYHgLFjvfn883Zv/j9y8zRMfFeE0mu3hm8264sWHtwWgxzV+tAdFd6BGXshVxbNYjJjpHiR4sU4HtyC77qJu4PnadpTCBdD5c/JSbD+F1g6BRLjxTYfzsy4o24Wc/nyAyZOXJs68dnLqwQzZvSgcuUsmltioWg0Gg4evMGyZcfYs+dK6mBDe3sbWreuSo8etWnatLxsIy95LbRaLUeP3mbuXD8OHbqZur5Nm6pMnvweZcvmzCGD8+bt5ccftwHwwQeN+P77Lm/eAuDKUfi8lfBON+gM/1urzwP0Py8qlGLS9E2q0xa+3ZwtuYKmRIoXKV4yz5GNYjJqzHOD4Kys4cMZYsaQoTuOwEvC23LztHhdq5Uol86GOG1sbAKzZu1i0aKDJCdrcHFxYPLk9+jfvwHW1m9vf5GnTyP5558TrFhxPDXnB6BOnVL07FmH9u1r4OqaPc0AJTmTixeDmDdvH9u3X0Cj0WJlpaJXr7p88kkbChd2M7V5Wc6yZUeZPHk9Wq2WgQMbMnVqtzf3wJzbC/97T9zwNe8DE5frQ+sXD8EXrcW4AR3e/eHTv3P0GAEpXqR4eX2Sk2DxF7Bu1ovvueYTdwTVmxnYLxlW/wirfhBDGJ3dYPgv0GpQtrS4Pn36DhMmrCIg4CkAHTrU4NtvO7+1M1y0Wi3Hj/uzdOkRdu68RFKSGNzn6pqLHj3q0L9/gxx7ZywxHbduPWH69O3s3Cm8tQ4OtnzwQSPGjHkXd/ecFebw9T3F+PGr0Wq1jB3rzeTJ7735QU9sh2+7iO/Q1oNFkzqdODm+VbyXtqdWj89g6Mw3P6+ZIsWLFC+vR8gjmNobLh168b3ydWHKeshX9MX3osPhx95wepd4Xb8jjFtgeH5RFhMXl8isWTvx8TmARqOlYEFXZs7sgbd3ZcXPbY7ExyexefNZFi06xJUrD1LX16pVgv79G9C+fQ0cHWVLfomynD59hx9/3MqJEwGAEM2jR7/Lhx82yVGNC1esOMbEiesA+OKL9owZ8+6bH/Twevixpyh2eH7G0Z5lMGtg+u0nLhNemByIFC9SvLyai4dgai8xWPF53hsOI3833Pn2wS2Y0hGCrotKovEL4d33s8Xbcu7cXSZMWM2tW08AMUzt22+74ObmqPi5zY2QkCiWLz/G338fSZ3W7OBgS/futRkwoCFVqrzd+T6S7Eer1bJ371WmTt3O9euPANHF+scfu9G0aXkTW5d1zJ+/jx9+2ArA9OndGTCg4Zsf1G8FzBogKj2fFycrvhOdzXXkzgN/Xc+RFUhSvEjxkjFarUiu/XPSiy3+be3FMMU2gw3ve24v/NBD9CTIW1QkkJX1Utzk+PgkfvllN/Pm7UWj0ZI/vwszZ/akVau3q4ETQEDAU/74Yz++vqdT+7IULOjKBx804v336+PhkbNc9RLLIzlZw/r1p5k2bXvqOImOHWvyzTedckxYd/r07cye7YdKpWLOnPfp2tXASJTMsupH+Pt/IgS/6Irek52UCGNqQ8BF/bYtB8Jnf7/5Oc0MKV6keDFMdLhIrj2yQby2sRX/GCAGh01ZD+VqG953yzyYP14IngrviP
kbeZQf5Hbr1hNGjlzG1asPAejatRbff98lx8XTX8W1aw+ZPduPrVvPp1YNVa1alOHDm9G+fXXs7GTXW4l5ERERy6xZO1my5AgajRZnZ3smTmzHoEENLb7CTavV8sUX61m69CjW1lb89dcHb34zlZwE4+rBrTPwTnv4boveo33jFIyvJ0JLOmbugxrN3+ycZoYUL1K8vEjgJVEG/eCW+Iewc4D4WPFeTW/4YrVhN2RSoph+uu0P8dq7v6gmslN2cJtWq2X16hN89dVGYmMT8PBwYtasnlkza8SCOHfuLr//7se//+o7b3p7V2LUqBa8846n7MsiMXsuXgxi8mRfzp0TrQyqVCnCzJk9qVGjuIktezM0Gg3jx69i/foz2NvbsGLFMBo2LPtmBw28DGNqieagE5eDdz/9ews+go2/6V8XKQs+FxX/Ls5OpHiR4iU9fivg92FCrNjai2x2nXDp9bkYqmio+21EiBgPcOGAEDxDpotsd4UvmOHhsUycuCa1S27jxuWYPbsvBQrkDJfz63DiRAC//ro7tZ+GSqWifftqjB3bUuazSCyO5GQNq1b9x9Sp2wgPj8Xa2ooJE1oyblxLix6OmpiYzLBhf7N792Wcne3ZunU85cu/oUd69VRY8qWYfbTwit7DHRsFw6rAk7v6bd//CgZ+92bnMyOkeJHiRZAQD398BNsWiNdOrqKnQEKcaCT32VJo1NXwvnevwpQO8ChAbPv5KqjfQXGTT54MYPToFTx4EIaNjRWTJrVj5Mjmb94UykK4eDGIGTN2sH//dUB0Mu3a1YsxY7xlqbPE4nn2LJL//W8jW7acA6BmzeLMnv0+pUvnN7FlxhMXl0jfvj78958/xYt7sH37R282/ylt+KheB5FbqLthPLULvmyr39bGFhacz7aZcUojxYsUL0Kd/9BDxEpVKnAvAOqnImelWAX4egMUr2h43xPbYVofiImEgqXg2y2KTzfVaDTMmbOXWbN2otFoKVkyL/Pm9aNmzbdjKNnNm4+ZNWsn27eLpDwbGyt69arL2LHeFC+ex8TWSSRZy8aNZ/niC1/Cw2NxcLBlypSODBzY0GLDoCEhUbRv/xt374ZQr15p/vlnxJvloQVegtG1RNh+0gpR0aljej/Yt1L/ukoj+OlgjmheJ8XL2y5ezu1LqQoKBXtH4XEJFaWLNOoqujTqppmmRasF35/hz4liuVpT+MpX8ZK88PBYxo5dgZ/fVQC6davF1KndcXHJObHcjLh3L4Sff97F+vVn0Gi0qFQqunb14pNP2lCyZM4rhZRIdDx8qOajj1Zz+LAIjTZrVoFffultsRVJN248okOH34mKiqdPn3f46adebybGdNVHz4eP1E/hw4oirK/jo0XQ9sM3+wHMALMSL2q1mrVr17Ju3Tr27NnzWvvMnDkTNze31P0nTpz42ud768XL6X/h644iPOSaD7Qa8SG3soIPpkLPiYZzVhLi4Lfh+oGMbYfCmLlgq2yDs6tXH/Lhh0u4c+cZ9vY2TJ3anT593lH0nOZAREQss2f78eefB0lIECXrbdtW5bPP2lKhgvJVXBKJOaDRaFiy5Ag//riNuLhEPDycWLBgAI0blzO1aUaxb981BgxYhEaj5ZtvOjFsWDPjD5aUKMJHt8+KRqDfbNJ/d/sth5kD9Ns6u4neL+6WHVrOzPVbUT/T2bNnWbt2LWq1mtDQ0FfvgBAuAMOGDWPYsGF4eXkxfPhwJc3MOZz1g286CeFSoAQkxArhkjsPTN0NvSYZFi6hj2FiCyFcrKxFh8cJPooLlw0bztC+/W/cufOMokXd2bx5XI4XLklJySxbdpSGDacyf/4+EhKSadSoLDt2fMRffw2WwkXyVmFlZcWQIU3YvfsTKlcuQmhoNH36/MHcuXuxxKBAixYVmTKlIwDffbeFffuuGX8wG1vRy8XGFo5vgf2r9e+92w+8WupfR6lFfuNbRLaEjXx9fZk2bRpnzpx55bbu7u4EBgamel5AVFq8rplvrefl3D6Y0l5UERXyhPCnImeljJfIb8loUOLtc/B1J3gaJNT7l2uhVkvD22YRiYnJfPfdZv766zAATZqUY/78ATm+wdqBA9f57rstqd1HS5fOz5QpHfH2rmSxsX6JJKuIjU3giy/Ws2bNSQDatavGr7/2sbjwsVar5dNP17B69QlcXBzYunU85coVNP6AK3+ApV+Bi4doXueRcqxHgaL6KD5Gv+3UXVC79Zv9ACbEbDwvmSUgIAC1Wp1OuOjw8/PLfoMshYsH9cKlcBmR6xITCeXrwKx9GQuXw+vho0ZCuBQtB7NPKC5cwsLEnZVOuIwb583KlcNztHAJCgpl0KC/6NvXh+vXH+Hu7sj333dh376JtGxZWQoXiQTIlcuOX37pzcyZPbCzs2bHjou0a/crN28aGGFixqhUKqZN6069eqWJjIxj0KC/iIiINf6AvSZBmZrie/33ESIfEaBQqRfLpGePhLiYF4+RAzE78WIINzc31Gq1wffi4+OJiIhI93iruHQYvmwnhEvR8hCtFi5Ez+rw4y6RrPs8Wq2Yl/F9d6Haa7USwqWosnHmgICndOjwO8eO3cbZ2Z7Fiwfz+efvYW1tVh/DLCMhIYk5c/xo2nQ6//57GRsbK4YNa8rRo18yZEgTi+5vIZEogUqlol+/BmzcOJZChdzw9w+mXbtf2bHj4qt3NiPs7GxYtGgQxYp5cOfOMz77bK3xYTAbW1FkYWMLxzfDgX/073UZD2XTjCZ4HAgrc07fl5dhEVcNDw+PDHNmpk2bhqura+qjWLFi2WydCblyVNT8x8eIsudoNYQ/EzX/0/dAbo8X94mLEZOkdYO+ukyAH7aLkJGC/PefPx06/EZAwFOKFHFny5bxtGlTVdFzmpKjR2/h7T2LadO2ExeXSIMGZdi7dyLffNP5rRwkKZFkhpo1S7B798c0bFiGmJgEhg79m4ULD5jarEyRJ48zCxYMwMbGiq1bz7Ny5XHjD+ZZDfp+JZbnjtEP1LW2EZVGVmluhNb9lH4OUg7FIsTLy5J9J0+eTHh4eOojKCgoGy0zIVePwxdtIC4aSlSGmAgIeyJaRs/YC275XtwnMQG+7QIH1woV/9EiGPmr+AdQkHXrTtGr1wLCwmKoWbM427dPyLGJqSEhUYwdu4IePeZz+3YwefM6M2fO+6xbN0o2mZNIMkHevC6sXj2CQYMaodVq+eabzXz99UY0aef7mDleXiX4/PP3AJgyZRPXrj00/mC9P9eHj2aP1IePytSE7p/ot9Mkw2/D0s9ByoGYlXjx9PQ0uF6tVmf4nr29Pblz5073yPFcOwFftBbtoktWgbgoePZANJSbuU+f0JUWjQZ+GgRn/hW9X6bvUbwvgFarZebMnYwfv4rExGTat6/OunWjyZ8/Z/6Ntm49T9Om01m//gwqlYqBAxty6NBkunWrLfNaJBIjsLGx5scfu/K//4nu3osWHWLEiGWpE9UtgREjmtGiRUXi4hIZMWIZMTHxxh1IFz6ytoFjm+DAGv17/b6GwqX1r6+f0M+jy6GYnXhxc3MzmPvi7
e1tAovMkOsnYXIrkZDrWU2UQz+5C3mLCuGSr+iL+2i1ooxu/2rxwf96g2hApyBJScl8/PE//PbbvwCMGfMuf/wxAEdHZcuvTcHTp5EMHbqE4cOXEhoaTcWKhdi2bTzTpnWXISKJ5A1RqVSMGtWCefP6Y2dnzbZtF+jdewFhYdGmNu21sLKy4vff+1KgQG5u3XrCV19tNP5gntXEPCOAeWOEtx3AwRHG+6TfdvFkCHkDT4+Zky3iJaOwT0BAQGpfFx2TJ09OV1nk6+vLsGHDFLXPYrh5OkW4RIiE3IR4eOgvPC2z9kHBkob3+2cabJotlj9bqngpXXx8EiNGLGPNmpNYWan46adefPFF+xw3n0ir1bJp01maNZvB9u0XsbGx4qOPWrFz58dvzVgDiSS76NLFi1WrRpA7twMnTwbSseNsHjwIM7VZr0WePM7MndsPlUrF6tUn2LjxrPEH6z0ZStcQPbzSho9qvgutP9BvFxMB88a9kd3mjKJXE5048fHx4ezZs0yaNAlfX9/U9/38/PDxSa8WJ06ciFqtxtfXF19fX06dOvXCNm8lt87C5y0hOlwIF00y3L8hWvfP2CtyXQyxY5GYUAow8jdo0VdRM6Oj4xkwYBE7dlzEzs6aRYs+oG/feoqe0xSEhUUzbNjfjBq1nLCwaCpVKsz27R/x2Wdt32ymiUQiyZAGDcqwefM4ChcWlUjdu8/j/n3LEDANG5ZlwgTRimLixLUEBj417kBpw0dHN4ocRh1DfwK3NEMuj6yH41uNN9qMkbONLAH/86IDbmSYUNwqlWgu5+IOM/dD6eqG9zuyEX7oLvJdek+GwVMVNTMsLJoBAxZx5sxdHB3tWLJkiMW2+X4Zx47dZuzYFTx6FI6NjRUTJrRizJh3pWiRSLKJ+/fD6NFjHnfvhlC8uAe+vqMpWtRAdaWZkZSUTM+eC/jvP3/q1i3Fhg1jjPdIL/8Wln8jOqgvuqIfDXBgjago1ZG/uHg/1xtMus4mLLZJncQAARdhkrdeuNjaC+HimBum/ZuxcLl4UEyG1migzRD44EdFzXzyJJxu3eZx5sxd3N0dWbduVI4TLomJyUyfvp0ePebz6FE4np752LZtAh9/3FoKF4kkGyla1B1f39GULJmXe/dC6dZtHkFBrzeCxpTY2Fgze3ZfnJzsOXkykKVLjxl/sN6ThRc+IgTmjNKHj5r2hHfe028XfA+WTnkzw80QKV7MmcDLMOld8eEsXUMo5+snwMEJftwJ5Wob3s//AkxJGc7YoDOM/8PwTKMsIjg4gu7d53P9+iMKFMjN+vVjclzOx507z+jceTazZ/uh1Wrp0+cddu/+hGrV3qK+QhKJGVGkiBAwpUrlJSgolG7d5lqEgCla1IMvvmgPwNSp27h/30ibbe3E7CNrGziyAQ6tE+tVKhg7P72nZdPvcPPV43ksCSlezJU7V0SoKPyZEC4uHnD5CNjnEk3lKjcwvN+jAFFGHRMBVRrD5FWK9nEJCYmiZ8/5+PsHU6SIO5s2jc1xPVx27rxE69Y/c+7cPVxdc+HjM5Cff+6Nk5O9qU2TSN5qChd2w9d3NJ6e+bh/P4yuXS1DwAwc2IB33vEkOjr+zbrvlq4BfVJyGueOhrBgsZy/OHyQJk1AoxG9X5KT3shuc0KKF3Pk3rUU4fJUuAXdC8L5fUJpf70p4zLnsCeiGinsiSip+26LEDsKERoaTa9eC7h58wmFCrmybt0oSpTIq9j5spukpGSmTt3GkCGLiYyMo06dUuzZ8xkdOtQwtWkSiSSFQoX0AubBgzD69vUhJCTK1Ga9FCsrK376qRf29jYcPHiDdetOGX+wPl+I60T4MyFgdHQYBRXe0b++fRY2zzX+PGaGFC/mRtgTmPguqIOFAClQAk7vEt6Tr9ZD7VaG94uOEKMCHvpDgZJirpGCLf/Dw2Pp0+cPrl59SP78LqxZM4qSJXOOcAkJiaJvXx/mzt0LwLBhTVOSAt1NbJlEInmeggXFzVORIu74+wczcOCfxjeDyyZKl87PJ5+0AeCbbzYTHGzkXL604aPDvvrwkbU1fPRnes/73/8TOTA5AClezAmtFn75EEIfiVlFRcvD8S1ibsUX/0C99ob3S4iDbzqLRF7XfDD9X8ijXOgmMjKOvn3/4NKl++TJ48zataMoUyb/q3e0EM6du0urVj9z5MgtHB3tWLBgAN9801kOUpRIzJhChdxYtWo4bm6OnD17lxEjlpGUlGxqs17KiBHNqFatKGp1DF98sd74A5WuITwwIJJ3I0LEcqkq0Otz/XZx0WI2Ug4oMpbixZzYsQhObBNKukYLoaBVKpi4DBp3M7xPcjJM7wcX9osErR93ZtzzJQtISEhi8OC/OHfuHu7ujqxZM5Jy5QyMI7BQNm8+R9euc3n0SI2nZz62b59Ap041TW2WRCJ5DcqWLcDSpR/i4GCLn99VJk5cZ3w+STZgY2PNzz/3xsbGih07Lr7Z9Ow+X4o5d+HP0o8G6PuluBHW8d9WOLnD+POYCVK8mAsPbokW/gBth8K/f4vlD2dm3FhOqxUtoo+sF4Lnm81QrpbhbbMAjUbDxx//w9Gjt3Fysmf16hFUqlRYsfNlJ1qtll9//ZeRI5cRH5+Et3cldu78mPLlc1bysUSS06lTpxQLFgzAykrFP/+cYNasnaY26aVUrlyEMWPeBeCbbzYZP7fJ1k7vZdkyTwziBbBzgP7fpN924+/GncOMkOLFHEhOghn9IT4GqjcTTeniokVibrePM95v+bdCYatUMGkl1GyhqJnTp+9gw4Yz2NhYsWjRoBxTJhwfn8S4cStTv+SGDWvKkiVDcHFxMLFlEonEGFq3rsKMGT0A+O23PW+WEJsNjBnjTaFCbty/H8bChQeMP1DTnuBRSKQepO2826irvokdwNk94obZgpHixRxY9aPo3+LsBp414MpREQL6ZAlk1H1xy3xY8a1YHjMPmnRX1MS//z6Smrw6a1YvmjWroOj5sgtRMTWf9evPYG1txfTp3fnmm85YW8t/DYnEknn//fqp7fgnTVrHxYtBJrYoYxwd7fjyS5HTOHu2H0+ehBt3IFs76JhScbThV31ui60dtHtuRuB2yx67I7+hTc21E7Dye7HcaSxsWyCWh/8ChUoZ3ufoJhEuAjEKvcNIRU3ctesS//vfBgA++6wtvXrVVfR82cXDh2q6dJnDyZOBuLg4sGLFMAYMaGhqsyQSSRbx6adt8PauRFxcIkOGLDHrEuouXbyoVasEMTEJTJv2Bjkp7w0XoaLbZ+HS4fTrrdIUHexeAvGxxp/HxEjxYkpio2BGPzFksUkPOLVTdMWt0xbafmh4n4gQ0WxIq4X2I6D/14qaePnyA0aNWo5Go6Vv33qpdzKWTkDAUzp3ns2tW6JHzZYt42jatPyrd5RIJBaDlZUVc+b0S+0BM3Kk+VYgqVQqvv22CwBr15403lPkmhe8B4jljb/p1+ctIjqu64gM1ZdVWyBSvJgSn0/g4W3IW1Q8bp4WwxY//jPjdv6LPhPN60pUgpG/K9r2PywsmiFDFhMXl0izZhWY
Pr07KgXPl11cvvyAzp1nc/9+GJ6e+di8eZxMzJVIciiurrn4668PcHS048iRW/z44zZTm5QhXl4l6NZNFF1MmbLR+EqpLuPF87FNouu6jk5j0m+n8/RbIFK8mIrjW2HHQiE+un4Em+eI9aPnQp4MKnjO7ROuPoAJi0QcUyGSkzWMGrWcoKBQSpTIw/z5/bGxsfw+JydPBtC9+1yePYuicuUibNw41iKm0UokEuMpX74Qv/8uqjZ9fA6wadNZE1uUMZMnt8fBwZaTJwPZuvW8cQcpUQlqtxYe+k2z9eurNRXl1Dqu/ScKRCwQKV5MQdgT+GWIWO44GnYvFhVHjbpB8z6G94mPhd+Hi+X2IzOebZRFzJq1k4MHb+DgYMtffw3Gzc1R0fNlBydOBNC3rw8REXG8844n69ePJl8+F1ObJZFIsoH33qvO2LHegEjgNdcZSIULuzF6tKgc/eGHrSQkGDmPqGtK643di0UHdhA3yx1Gpd9uq2V6X6R4yW50XXTDn0KpqmBlA3evgFt+GLcg4zDQqh9FiMmjEAyZpqiJO3deZPZsPwB++qlXjujlcubMHfr1W0hMTAJNmpRj5crh5M6t3NwniURifnz2WRtq1y5JZGQc48atJDlZY2qTDDJyZAsKFMjN/fthrF170riD1GolOrXHRMKuv/TrvfuDY5qbtn0r9eLGgpDiJbtJ20W3w2jYnOLSG+8DbvkM7xN4GdbOEMtj5oKTq2Lm+fsHM378KgCGDm1C167KNb3LLs6fv0ffvj5ER8fTsGEZFi8egqOjciE3iURintjYWDNnzvs4Odlz4kQA8+fvM7VJBnF0tEv1vsye7Wec90Wlgi4TxPKm2aIbOwjhokvoBdFTbO/yNzPYBEjxkp2k7aLb53/g+5MYVe49ABp2NryPRgO/DRVhpfqdoGEXxcxLTExmzJgVREXFU69eaf73v46KnSu7uHgxiD59/iAyMo569UqzdOmHUrhIJG8xJUrk5YcfugIiPG6u/V/ef78++fO7cP9+GOvWnTbuIN79IXceeHIHjm/Wr+84Ov12WxdY3LwjKV6yi3RddJtD2GN9pdGol7Rq3vaHSKrK5Sy8LgpW+/z6624uXAjC1TUXc+f2s/hBhP7+wfTt60N4eCx16pRi+fKhODram9osiURiYnr2rMN771UjKUnD6NEriIlJMLVJL5Arlx2jRum8L3tITDSixNs+l+jvAqJpnY7iFcX8PB13r8DlI29gbfYjxUt2kbaLbssBsHW+WP/xX2KdIZ49gMUpsyoGT4N8RRUz79SpwNQ8lxkzelC4cAY2WQhPn0by/vs+hIZGU716MVasGIaTkxQuEolE9FSZMaMnBQu64u8fzNSp5lk+3a9fA/LlcyEoKBRfXyO9Lx1Hg42tECc3T6dfnxYLK5uW4iU7SNtFd/B0WDpFLLcfCbVbZbzfvLEi2apiPbGtQkRFieQ1jUZLt2616NjRsqcoR0fH07//Qu7dC6VkybwsXz5UzimSSCTp8PBw4pdfegOwZMkRzp+/Z2KLXsTR0Y6RI5sDb+B9yVMYmvYSyxt+06+v31F4/nUc9oWwYOONzWakeFGatF10m/eBGyfgaRAULg1DZ2a835GNcHQjWNvAhIVgrVwIZ8qUTdy9G0KRIu78+GM3xc6THSQmJjNs2N9cvHgfDw8nVq4cRt68shxaIpG8SLNmFejWrRZarZaJE9eaZffdAQMakDevM3fvhrBhwxnjDqJL3D24Rnj0QVxbdCElgKREUVZtIUjxojSLJupzW+q0FU3mVCr49G+Rx2KI6Aj97KKeE0VJtULs23eNf/45gUqlYs6c9y26fFir1fLFF77s33+dXLnsWL58KKVKZVDBJZFIJMDXX3fC1TUXly8/4O+/zS/vw9HRnpEjRX7KnDl+aDRGlHeXqwVVGovcyy3z9OvbDRUhJR3bffRVSWaOFC9K8uwB7Fwklof/LFr7A3T7BKo0yni/JV9AyEMoXAb6/k8x82JjE/jyy/WAKIuuV6+0YufKDpYvP8bKlf9hZaXijz8GULNmCVObJJFIzJy8eV1SJzrPmLGThw/VpjXIAAMHNsDVNRcBAU/Zv/+6cQfRNa3b7gNxMWLZvQA07qHf5skdOLP7jWzNLqR4UZKt84XSrdYUzvwrOuuWqASDvs94n6vH9cm8431EtrhCzJnjx927IRQq5Mqnn7ZR7DzZwcmTAXz11UYAPv/8PVq2rPyKPSQSiUTQt289atUqSXR0PFOmbDS1OS/g6GhP797vALB48eFXbJ0B9TtCwVJiIKNfmr4uhsqmLQApXpQiPlYoXIAW78PeFWJ5vI8YV26IxAT9xOhWg6BmC8PbZQG3bwczb55o0PT9911xdrbchNZHj9QMHfo3iYnJdOhQI7W5k0QikbwOVlZWzJjRA2trK3bsuMj+/ddMbdILDBrUEJVKxf791/H3NyKx1toaOo8Tyxt/Ez3EACrVh9I19Nud3A5P7r6puYojxYtS7FsJESFQoKQYBZAYD2VrQeWGGe/j+xPcuSxGmg/7STHTdLkhiYnJvPtuRdq2VS6nRmni45MYOvRvnj6NpGLFQvz6a+8cMflaIpFkL5UqFWbIkMYA/PjjNuNySxSkRIm8vPtuRQDjc3NaDxYddoOui2gAiBzMtN4XrVYMDTZzpHhRAq0WNqY0nuswUv9B6Dg64yZzD27Biu/E8ojfRFdEhdi48SxHjtzCwcGWH37oatEX+xkzdnD27F3c3BxZvHiwbEInkUiMZty4luTO7cDVqw/ZuNH8Jk8PHizE1Zo1J4mKisv8AZxyQ5sPxXLapnXN+6bvN7bzTxEJMGOkeFGC8/uFB8XBSQxSfHIXXDygWW/D22u18Ntw4Z2p1Qpa9FXMtLi4xNSGTOPHt6REibyKnUtpDh26wR9/7Afg11/7WPTPIpFITI+HhxOjR78LiBuj+HgjJzorRJMm5fD0zEdUVLzxIwM6jQUrK+F5uXNFrHNwFF4ZHepg0arDjJHiRQk2pXhdWg4U4SMQH4yMkm+vHoML+0UuzMsmS2cBy5Yd5eFDNYUKuTF8eDPFzqM0ISFRqQMkBwxoQOvWVUxskUQiyQkMGdIkdaLzsmVHTW1OOqysrPjgA1GpumTJYbTGzCMqVAoadBbLG9OMpnm+EaqZd9zNFvEyc+ZMFi5cyMKFC5k58yWN2VLw8/OjR48eLFy4ED8/PyZNmoSvr282WJoFPPSH/7aK5Tpt4fRuIUY6vKRD7u4l4rlZbyjkqZhpkZFx/P67GAHwySetcXCwfcUe5olWq+XTT9fw5EkEZcsWYMqUTqY2SSKR5BAcHe1Sqy9/+20PERGxJrYoPT171sXJyZ7bt4M5duy2cQfRlU3vXQ7hz8RykTJQO03V6cWDcPfqmxmrIIqLF51YGTZsGMOGDcPLy4vhw4e/dB+1Wo2fnx/Dhw9n+PDhlC5dmu7duyttatawea4IA9VuA+dTxq3XbZexKImNFl0PQVQYKYiPzwHCwqIpXTo/PXvWUfRcSrJmzUl2776Mra018+b1l1OiJRJJltKrV11
Kl85PWFg0Pj4HTG1OOlxcHOjcWYxwWb/eyI67lRuKApKEODH8V8cL847+wFxRXLxMmzaNYcOGpb729vZm4cJXZzIHBgai1Wrx9/dPt79ZExOpb6/cbij8m+JR6TA6432ObhAjBAp5QtUmipn27Flk6j/h55+3w8bGMidGP3sWyXffbQHgs8/aUqVKERNbJJFIcho2NtZ89pnwQixZcoTo6HgTW5Sebt1qA7B9+wXi4hIzfwCVSu992fWnuOEGES0oWEq/3Z6l4gbbDFFUvAQEBKBWq3Fzc3vhPT8/PyVPbRr+/RtiIqBoeVEmHaUWM4xqt854H13IqOUgRXNd5szxIzo6nurVi9GuXTXFzqM0X3+9CbU6hsqVizBiRDNTmyORvJUEB0fw7bebzXIWUFbx3nvVKVUqL2p1DKtX/2dqc9JRt24pChd2IzIyDj+/K8YdpEFnMd/oyV19Xxdr6/S5LzERcGD1G9urBIqLF0O4ubmhVqtfuu/atWvx9fVl4cKFTJo0KcPt4uPjiYiISPcwCRoNbJotljuP03fJbT9SZHYb4lGgSNRVqaDVQMVMCwuLZsUK8c83aVI7iy2N3r//Ghs3nsXKSsWsWT0t1nskkVgyyckaunWbh4/PAaZN225qcxTD2tqKESPERGcfn4PGTXRWCCsrK7p08QJgwwYjS7pzOUE54cHhcpquve/2S7/dnmXGHV9hTFJt5OHhQWhoaIbve3l54e3tTffu3Rk2bBilS5emR48eBredNm0arq6uqY9ixYopZfbLOblDDGB0doMiZcH/vKgeavVBxvvsWSqea7SA/MUVM2358mPExiZQuXIRmjYtr9h5lCQmJoHPPxdJ20OGNKZGDeV+XxKJJGOsra2YNKktAAsW7Gfnzksmtkg5unevTd68zjx4EMaWLedMbU46dKGjffuuolbHGHeQKqJvDJcO6dflKSSaq+q4eUpMnDYzTCJeXiZcADw9PfH01Ce49uzZE19fX4PemsmTJxMeHp76CAoKympzXw9dyVmbD0X4CETjn9wehrfXaPTi5WUC5w2Jj09KnYUxYkQzi/W6+PgcICgolMKF3Zg4sZ2pzZFI3mrat6/BsGFNAfjoo1XcufPMxBYpQ65cdnz4ochFnD9/n3GlyQpRoUIhKlUqTEJCMtu3XzDuILo8y7TiBcTIAB0JcRBw0bjjK4ii4iWtAEmLWq3O8D3ghbJoXc6MoTCUvb09uXPnTvfIdu5cgXN+IjzUpAccXifWP5+5nZaLB8UET8fc0LCLYqZt2nSW4OBIChVypUOHGoqdR0mePo1k/nxRufXllx1wcpJddCUSU/Pllx2oXbskERFxDB36t9k1dMsqBgxoiJOTPdeuPeLgwRumNicdutDRG1UdqVRw/6YYHKyjYv302904aaSFyqG4eHFzczMoOry9vQ3uo1ar6dGjR7p9dB6Xlwkek6LLdWnQGc7uES62ivWgrFfG++gqkZr2Et0NFUCr1bJw4QEAPvigMXZ2NoqcR2l+/nkX0dHx1KhRnE6dapjaHIlEAtjaWvPHHwPx8HDiypUH/PLLblObpAhubo707l0XECF4c6JzZy9UKhX//efPo0fqzB/AxR1Kpcy2u5Qm76XSc+Ll+gmjbVQKxcNGkydPTldZ5Ovrm670OSAgIF3jOjc3NyZOnJhOqCxcuJDu3bsbrFoyORGhotEPQMcx+rr4l5VHR0fA4RTvUmvlQkaHD9/k2rVHODra0a9f/VfvYIbcuvWElStFsvGUKR2xyij5WSKRZDuFC7sxc2ZPAObN28u5c+Y/jdgY3n9ffH/u2XOFp08jTWyNniJF3PHyEvl/+/ZdN+4ghkJHntVFzqaOt1G8TJw4EbVaja+vL76+vpw6dQofH5/U9/38/NK9BiF4Zs6cmfoICQlh3bp1SptqHDsXQXysGCkeFQbP7oNrPhE+yohD68Q+RcsLD41CrFolPnA9e9bFzU0Z747STJu2neRkDa1bV6FevdKmNkcikTxHu3bV6NLFC41Gy/jxq4iNNe+BfsZQoUIhvLxKkJSkYd26U6Y2Jx0tWlQCROKuUejES9qKIxtbfSUSiCnUUWrjjq8QKq05ZSBlAREREbi6uhIeHq58/ktyEgzwhKdB8OkS8Fsuuur2ngyDp2a830eN4MpRGDwNen+uiGnh4bHUqDGF+Pgkdu36mGrVTFSF9QZcu/aQd9+dhUql4sCBSZQtW8DUJkkkEgOEhUXTosVMnjyJYMyYd/nii/amNinLWbnyPz77bA2lS+fn0KHPzab44eLFINq0+QUnJ3uuXPkh8+kBoY+hdyGR+7I+VD9detFEWDdLv920f6FWyyyz2xCZuX5LH/ybcHSjEC6u+YTn5fw+kbTbfkTG+9y/KYSLlRW0HKCYadu2nSc+Pony5QtStWpRxc6jJHPn7gWgfftqUrhIJGaMu7sT06aJES4+PgcICHhqYouynk6dauDoaIe/fzAnTwaa2pxUqlQpQr58LkRHxxtnl0dB0d5DqxXXJh3PJ+2aWehIipc3Ye8K8fzecNj1l1h+p8PLe7boyqhrtYY8hRUzTefa7N69ttncIWSGu3efsXmz6KswZozh5G6JRGI+tG5dhRYtKpKYmMzXX280tTlZjrOzAx07iplC5tRx18rKiubNKwKwd28Who6eT9o1s4ojKV7ehJunxXOVxvqeLS8rj05OBr+UboUKJureufOMkycDsbJSpTYysjTmz9+HRqOlefMKFus5kkjeJlQqFd9+2xlbW2v27r3Gnj1Gtq03Y3r1ElVHO3deIiHBfErD331XiJd9+64ZdwBDzeo8CqZvVnf9hH4GkhkgxYuxhD6GkIciTpgYL4Yy5i0KNd/NeJ9zfvDsgShPq9dBMdN8fYWoaty4HAULuip2HqUICYli7VrhORo7VnpdJBJLoXTp/AwdKprXfffdFpKTNSa2KGupU6ck+fO7EBkZx9Gjt0xtTipNmpTH2tqKW7eecO9eSOYPoPO83DwNcWm69ab1vqiD9TOQzAApXozldkqr6KLl4XFKT5pytTOeYwT6IYzN+6YvQ8tiduwQ3RC7daul2DmUZN26U8THJ1GtWlHeecdMe/tIJBKDjB/fEnd3R/z9g9m0yci5O2aKlZUVbduKwba671lzwNU1F3XqiGnQBw4YUTJdsKS4+U5KTJ/bYsZ5L1K8GMvtlH/KMl761sme1TPePjIMjm0SywqOA7h/P4zr1x9hZaVKLaGzJLRabWpfl/79G1hkvo5E8jbj4uKQOtDwl19257jJ023aiKZuu3ZdNivPkq6VxJkzdzK/s0oFVQ2Ejp5v5SHFSw5A53kp6wUBKXMlPKtlvP2Bf0R4qWSVl3fefUN0tf5eXiXw8HBS7DxKceJEAP7+wTg62tGpU01TmyORSIxg8ODGeHg4ERj4jA0bjGxdb6Y0aFAGV9dchIREcfq0+VQd1a5dEoDTp+8YdwBDSbtm3KxOihdj0XleSlWDu1f0yxlxMmV0vPcAoXIVws9PiBdvb8vzugCsWHEcEDM7nJ2VC61JJBLlcHKyZ9SoFg
DMmbMXjcZ8PBRviq2tNS1bVgbMK3Tk5VUCgMDAZ4SERGX+ADrxcvUYJKY0GrS1S9+s7vZZs5kwLcWLMUSGweMUxZ3LWUzddHCCQi/JzwhKGehVTrnqn9jYBI4cEUlk775reeIlKiou9ctA145bIpFYJgMGNMDZ2R5//2AOHbppanOylLZtReho714jq3sUwM3NkTJl8gNGho6KV4TceUT399tpcpXS5r0kxEHgpTczNIuQ4sUYdCGjQp4QfE8sl6qacbJuYoJe7BQtp5hZx4/7ExeXSKFCblSqpFwPGaXYu/cacXGJeHrmo3p1y+sILJFI9Dg7O6SWFi9efPgVW1sW9euXQaVSERDwlCdPwk1tTiq1apUE4MwZI6qCVKo0JdNp/l5mmrQrxYsxpEvW1eW7vCRZ93EgaJLB3lHRxnSHDgnvTosWFSwy0VXndWnbtqpF2i+RSNLzwQfiYrh37zUCA3NO1103N0cqVxbf5ceP+5vYGj26vBejPC9guN+LmSbtSvFiDKnipSYEpsQ8X5bv8iClH0CRsormu+jUtiWWF8fGJqR2h3zvvZcIQYlEYjF4euajRYuKaLVaVq82j4teVtGgQRkAjh+/bWJL9Og8L+fO3TOuyqtaSt7LlSOgy1PKUwgKlNBvI8WLBXMrjefF/zUqje6nxHsVDBklJCRx+fJ9ALy8Sip2HqU4ePAGMTEJFCniLkNGEkkOQhc62rTpbI5K3K1fX4iXY8fMx/NSrlwBnJ3tiY1N4Pbt4MwfoHQNkccZpYY7l/Xr04aOgq5DtOlDZVK8ZJaYSHiQIkYKlIRnQjBQqmrG++i2L6KceLl69SHx8Um4uztSqlRexc6jFLoqKRkykkhyFt7elXBysuf+/TDjy3jNkHfe8USlUuHvH2w2eS9WVlaUK1cQgNu3n2T+ANY2UKmBWE4XOnp+ztEpIy3MOqR4ySwBF8R8h7xFQJ3y4ShQEpxe0oY/GzwvuhhnzZolLPLir3O9Nmmi3O9IIpFkP7ly2dGunbi527gx53TcdXNzTC2MOHXKfPq96CqObt0ywvMCrzek0QxCR1K8ZBaDIaNX5GikzXlRiHPnRNWTrtbfknj4UE1g4DOsrFTUrWt5+ToSieTldOkiRpXs2HExR4WOqlUTQ2OvXn1oYkv0lC4txItRnhfQi5dLh/SDGM2wWZ0UL5nFP6VMuoyXPln3ZfkusdH60JKCnpdz50SyriWKl//+EzHjqlWLkjt3LhNbI5FIspr69cvg6GjH06eRXL36yNTmZBk6z8uVK+YjXsqUKQBgXM4LQPk6InwU+lgMEoYXm9WZwYRpKV4yi67SKO1YgJdVGj1MyUR38RANgBQgLi6RO3fEJNHKlYsocg4lOXZM/I502fsSiSRnYW9vQ8OGwvO8f7/5NHZ7U3Ti5do1cxIvOs9LsHFeLjsHfRpE2sTciuY1YVqKl8yQEAd3UkYBeFbXZ2OXfknY6IHy+S737oWg1WpxdrYnb15nxc6jFLr5ILrBYhKJJOfRvHkFwMipx2ZKxYpCvNy/H0Z4eKyJrRGULJkXGxsrYmMTePTIyERih5S5eHHR+nXl66Tf5sZJ446dRUjxkhkCL4lmc675RAvlhDjReK7gS/I0siHfJSBANH8qVSqfxSXrxsUl4u8v7K9ataiJrZFIJErRrJkQL6dOBRIXZx7zcd4UNzdHihRxB+D6dfPwvtjaWlOypKg4NTp0ZEi8OLml3+buVeOOnUVI8ZIZbhlqTlcVrK0z3ue+8mXSgYHPhCml8il2DqW4desJycka3N0dKVAgt6nNkUgkClGiRB7y5nUmKUnDlSsPTG1OllGxYiEAs8rl0YmXoKAQ4w5gSLzY2qXfJtm0AlSKl8yQbizAa3TWhWwJG+nabnt6Wl5/F12suGLFwhbnNZJIJK+PSqWiRo3igL46MiegEwr374ea2BI9+fK5APD0qRHTpcGweLF5XrwkGXfsLEKKl8xgKFn3ZZVGkC09XnTixRI9L9euibsV3d2LRCLJudSsKaohL1zIOeJFFzZ6+FBtWkPSoBcvkcYd4HXES5L0vFgGSYn6UFG6MumXJOtGhEJEituusHKVNPfuCcVfooQy1UxKcuuW6EVQvnxBE1sikUiUpkYNMfrj4sX7JrYk6yhc2A0wL/GSN68QL8+eGSlecqUUfqQLG9mn30Z6XiyEKDUkJohlZzcITrlzeOlYgJRk3TyF9R8GBQgNFR8wndq2JB49UgNQtKiHaQ2RSCSKo+tBcvfuM5KTc0azOnMUL4p4Xl7IeZHixTKwsdUv68ql8xYVQiYjsiHfJSEhiejoeEBkvlsaulI+3ReARCLJuRQu7IatrTUJCcnGl/GaGbrvrsePw81GkGWZeIlNkzMjc14sFOs04iUpxQPzvBJ9nmyoNAoLE8rYykqFq6tldaeNiYlHrY4BoFAhN9MaI5FIFMfa2orixUV4+86dZya2JmvInz83NjZWJCdrePIkwtTmAHrxYnTY6HU8LxopXiyDtJ4Xq5TS6IS4l++jCxsp6HnRXfxdXR2xsrKsP6fuzsvZ2R4XF4dXbC2RSHICJUsK8XL3bs4QL9bWVri6Cq93RIR5NKrTiZeIiDji440QGfay2ijnYJR40XlelGtQFxYmxIu7u+WFjIKDxV1K/vyyv4tE8raQL5/4f9fl6uUEnJzEhV0Xwjc1Tk765FqjGgIa9Lw8l7Br4mojm+w4ycyZM3FzcwNArVYzceJERfZRFJVKiBZNshhaBa8WL7qhVvmKKWaWLmzk7u6k2DmUQveP7uxs/4otJRJJTkGXm6e78coJODqK77CYmAQTWyKwsdH7JRITjfCQyD4vQoQADBs2jGHDhuHl5cXw4cOzfJ9sQed90XleEuNePlnTMaX6J145V6IlC4DYWKHcdf/4Eokk56PzEutuvHIC5uZ5UalU2NqK61RCQnLmD2BQvNim3yani5dp06YxbNiw1Nfe3t4sXLgwy/fJFnR/PN04AI3m5X9A3SyIaLViJmk0QjxZWVled9rYWHGXkiuX7Su2lEgkOQVdYYEuXy8nYG6eFyBVvLyR5yU+jXixstJHHSBni5eAgADUanVq+Cctfn5+WbZPtmH9nOcFXh460o0Vj1IrZpLO8WOJrfV1/+i5cr2iaksikeQY7OzEBTApyQiPgJmiyzExF88L6H/PWeZ5gfShIxOLF0VzXgICAgyud3NzQ61WZ8k+8fHxxMfrPzAREQqWquk8L6o0mi8hTh8eeh5dDxhFxYtQLxaoXVKz4O3tsyX1SiKRmBEvi7hbGjrPt84Tbg7oPC9GicRcGYgXWzuIT/GY5WTPS0Z4eHgQGpq5IVYZ7TNt2jRcXV1TH8WKKZccm+p5SU7SZ16/1PPiJp4VDBtZsufF2lp8/JKSzKOxk0QiUR5L/K56FYmJQiDoBIM5oA8bZaHnJW3F0ds4VTqzwuVl+0yePJnw8PDUR1BQ0JualzE6z0tyItil9CVJfIl40XleopXrJKm14NsXncclIcG0Cl4ikWQfOu1iyd9dz6MTCHZ25iNedBj1e37bw0aen
p4G16vV6gzfy+w+9vb22NtnU7WKTrwkpYiX6PCXVxLpPC8y58UgUrxIJG8fuipDB4eck6ivS4q1tTWfELiuIMKoas604kWr1StOW/MRL4p6Xjw9PXFzczOYx+Lt7Z1l+2Qb1s+JF3h52CjV86JWzCR7e6H04+NN68IzBl1CmVEdICUSiUWStit4TkGXFGtOYSNd8rCjoxEFETrxotFAYpokZDPyvCgeNpo8eXK6KiFfX990ZdABAQGpfV1edx+TkTZsZJsJ8aKg5yV3blF2GBHxioZ5Zoi9vfh9GtUBUiKRWCQ68WKJg2QzwtxyXhISklIFVdpuu6+NQ5qmpxk1qsvp4mXixImo1Wp8fX3x9fXl1KlT+Pj4pL7v5+eX7vXr7GMyng8bgclLpXV3L+HhltczwcND/IOEhES9YkuJRJJTCA8XoXY3N8saJPsydDONzKVZaNp+M0Z5Xqxt9CGijEYE5OScFx1pW/t379493Xu6LrqZ2cdkGAobvSxhNxuqjXQNn3RfCJZE/vyixDw4OBKtVmuReTsSiSRzPH4sChjy5MmgxYQF8vSpmN6sm9tkanQhIzs769TwfKZxcILEhIwnS7+N1UYWi6FqI7MJG8VaXPZ+gQLiHz02NoGoKPNp7iSRSJTj3r0QQD9d2tKJiUkgMlJcB3Q3ZKZG53kxKmQEEBejv265eOjXv01hoxyF0Qm7ypVK61yvSUkas2pN/To4Otqnull1E6YlEknOJTlZQ1CQaHtRokReE1uTNTx7JrwuDg62uLg4mNgagS6MZfTcuKDrosoodx5wy69fL8WLhZLO85ISr32dJnWJ8a+eQG0kuXLZpU4QtcRZIfnzC+/LkydSvEgkOZ1Hj9QkJiZjZ2dNoUKupjYnS9CHjFzMJvT96JEawPjf8Z3L4rlE5fTt2+PS5CdK8WJBZDZhN5ezGGYFioWOVCpVapxVF0u2JEqUEK7jgIBgE1sikUiU5saNx4Dwuug6bFs6wcF68WIuPHigBqBwYXfjDnD3inguWUW/TqOBgAv611K8WBCZTdi1ssqWiiOdALh7N0SxcyhFuXIFAbh584mJLZFIJEpz/vw9AKpXV3CMSzZz584zAIoUMVIoKMCDB2EAFCniZtwBdOKlRGX9useBECOEGpNXw6dLjDcwC5DiJTNkNmEXwDFFvChYcVS8uBAvuliyJaEXL49NbIlEIlGaCxfE+JacJF5u3RI3XuXKFTCxJXoePlQDbyCo7ug8L2nEy+1z4rlsLWjeG+q2M97ALECKl8yQNmz0Ok3qIFsqjooXF9nglul5Ef/w0vMikeRstFptGvFS3MTWZB23bokbL92NmDmg97wYIV5io+DJHbGc1vPif148l6n5RrZlFVK8ZIbUsFHC63tesqHiSBc2CgqyRPEi/uEfPw63yIRjiUTyegQGPuPp00hsba2pVKmwqc3JErRabeqNV9my5uN5eSPxcu+aeHbLD65pKsL8UzwvpWu8mXFZhBQvmSFPyj/cg1uvL16yYTijLmxkiZ4XFxeHVPGli4dLJJKcx8GDNwCoU6eUcV1fzZAnTyKIjIzD2tqKUqXymdocAGJi4nn2TFQFFS7slvkDGMp3Ab3npbT0vFgeFd4Rz9f+A+cURfvs/sv3yYbhjLqw0cOHaouc0Fy3bikATp58cRinRCLJGRw4cB2Apk3Lm9iSrENXPVWyZF7s7c1jovS1a48A0TBPN4IlUxjKdwkLhpCHomzas1oWWPnmSPGSGXTiJeg6FEv5B7x2XDTzyYhs8Lzkz5+b3Lkd0Gi0qcljlkTdup4AnDwZaGJLJBKJEiQkJHHs2G0AmjWrYGJrso4zZ+4AUKVKEdMakobLlx8AULmykTYZ8rzovC5FyooWIGaAFC+ZwS0fFC4tlnXl0uHPRBgpI7LB86JSqahatSgAFy++whNkhujEy9mzdy3ScySRSF7O0aO3iI6OJ29eZypXzhn5LqD3Fr/zjqeJLdFz5YoQL1WqFDXuAAbFi3nlu4AUL5mnfIr35fZZKFdHLF85mvH22VBtBPoP6qVLlideypTJj7u7E3FxiRZpv0QieTmbN4uL33vvVcfKKmdcdpKSkjlz5i5gXuJF73kxQiTGREJwSu6hIc+LmeS7gBQvmadiPfF8/QRUbiiWXyZeGnSGGXth4HeKmlWtmuibcOlSkKLnUQKVSkX9+sKjpYuLSySSnEFcXCI7d14CoHNnLxNbk3VcvfqQ6Oh4cud2oHx58yiTTkpK5vp1kfNiVNjo7lXx7FEIcqcZyHhbel4sH514ufYfVKovlq8ey3j7AiWgZgsRK1SQatWE5+XKlYckJSUrei4laNlSqPx//71iYkskEklWsm/fNSIj4yhc2I06dUqa2pwsQxcyqlOnlNl4k/z9g4mLS8TR0Y5SpYwYfGkoZBQbDQ9uimUz6fECUrxkHs/qYGsPkaH6aZv3rkGEabvbliqVF2dne+LiEi0yadfbuxIqlYpLl+6ndoeUSCSWzz//nACgU6eaZnORzwqOHhUJyLqcPXPg/Hnhea9UqbBxv2vdQMa0lUaBF0VRikdBcDefXjY555OUXdjaQdkU1+eD21AsJXP+Zd6XbMDKyio178USk3bz5HGmdu2SAOzZI70vEklO4M6dZ+zdK5qevf9+fRNbk3XExiZw6JDwRphT9dSRI8Km+vXLGHeAl1UamVG+C0jxYhwVdHkv/0GlBmLZxOIFoGZN0XL7xAnL7JfSqpX4h9m9+7KJLZFIJFnB0qVH0Wq1NG9eAU9P82jilhUcPnyT2NgEihRxN5syaa1Wy5EjovK1ceNyxh3EkHgxw3wXkOLFONI2q3udpN1sQveBPXToBtqX9Z4xU1q3rgqIu4dnzyJNbI1EInkTYmISUkNGgwY1MrE1WYvuBqtVq8qoVCoTWyO4desJT55E4OBgm+rFzhRRangmKpUoUUm/PuC8eDajfBeQ4sU4dEm7ARf0IaQbJyExweDmSUnJ/PnnQQYPXkxMTLxiZtWtWwo7O2sePlQTGPhMsfMoRZky+alZszhJSRo2bDhjanMkEskbsGrVccLDYyle3IMWLSqa2pwsIzlZkxrabt26iomt0aMLY9WpUwoHB9vMH0BXaZS3iL7FR3ISBIpKMel5yQnkLy6Sl5KTxATO3HnEjCNdbPA5rK2t8PE5yK5dlxTtIuvoaE/t2qLV/uHDNxU7j5L07FkXgDVrTlqk90gikYjy6Hnz9gEwevS7WFvnnEvN2bN3efYsity5HYzPLVEARUJG966La1suZyhU+g0tzFpyzicqO1Gp9M3qrp/U571kEDpSqVQ0aiRKpXUfMKXQfXAtVbx06lQTe3sbrl17JBvWSSQWyqpV//HkSQSFC7vRq1ddU5uTpei8wt7elbG1tTaxNYKkpGSOHxfVT02aZKF40d2Qe1YHM6sUMy9rLImKBpJ2X5L3ohMvR49mj3g5evQWyckaRc+lBG5ujqm5L//8c9LE1kgkkswivC57ARg71hs7O/MYWJgVxMYmsGnTWQB69KhjYmv0HD/uT2RkHO7uTsY1p9No4L+t
YrlsLf163VgAM8t3ASlejCdtszpd0u7VoxkOaWzYUIiXixfvo1bHKGZWtWpFyZ3bgfDwWIv1XPTuLe7UfH1PERERa2JrJBJJZli06CCPHoVTqJAbvXu/Y2pzspRduy4THh5LkSLuNG6sbOPRzKAbv9CuXVXjQnTn9sKjAHDMLbrC60j1vNR4UxOzHClejKVcbeFGexokEpxsbCH0MTy+Y3DzggVdKVMmP1qtluPH/RUzy8bGmkaNhPfl338ts+S4SZNylC1bgKioeFat+s/U5kgkktfkyZNwZs/2A+CLL97D3j7neF1A33CvV6+6ZtNwLyEhiZ07LwLQqZOR4xe2+4hn7/6Qy0ksJyaIGX4gPS85ilzOUDIl09z/ApRJ+dC8JHSk874oHTpq164aAFu3XrDIpFcrKyuGD28GwJ9/HiIx0fLGHUgkbyMzZuwkOjqemjWL06VLzpljBBAUFMrhwzdRqVRmlcdz+PBNwsJiyJfPJXVGXKYIfQzHN4vl94anObCvKJ/2KASlqmaJrVmJFC9vQtpmdamho4yb1enyUXRdEJWiVavK2Nvb4O8fnDqky9Lo2rUWefM68/Chmq1bz5vaHIlE8gouXLjHmjUiT+3bb7uYjWciq9B5gRs1KkuxYh6v2Dr72LJFhIzat69uXMho92JROVupfnqRsnmOeG4/QkQWzIyc9enKbnTN6tJOmL6aseelfv3SqFQqbt58QnBwhGJmOTs7pLasttQLv4ODLYMHNwbAx+eARXqQJJK3hcTEZD75ZA1arZYuXbyMa5JmxkRHx7N0qfhuHzCggYmt0RMXl8iuXSI9oGNHI0I7Gg3sWCSW26Xxutw4JfI5bWzTe2PMCCle3gRd0u6NU1A+xY0YeAmiww1u7u7ulNpKWulS5g4dagCWGzoCGDCgIbly2XHp0n05MkAiMWPmz9/H1asPcXd34ttvO5vanCxn9er/UKtj8PTMR5s25hNC2bv3KpGRcRQq5GrcxO4z/8KTO6IpXdOe+vU6r0vTXmY1jDEtiouXmTNnsnDhQhYuXMjMmTNfub2fnx89evRg4cKF+Pn5MWnSJHx9fZU20ziKVRDZ2fExEPEMCnmKaqNrJzLcpXlz4RHZteuSoqblhNCRh4cTH37YBIAZM3ZYZOm3RJLTuXnzMb/+uhuA77/vQt68Lia2KGtJTEzGx+cgACNGNDOrhns6b1C3brWNC9OlJuoOAPtcYjnsCRxcI5Y7j8sCK5VB0b+CTqwMGzaMYcOG4eXlxfDhL3dBqdVq/Pz8GD58OMOHD6d06dJ0795dSTONx8pK73G59nqhI10y7b591xUdFZATQkcAo0a1wM3NkRs3HrNxoxwZIJGYE4mJyXz00WoSEpLx9q6U45J0QeSUPHgQRr58LnTvbj69XW7efMyRI7ewslIxYEDDzB8g5KG+t0va0ND2haLSqMI7UN58ft7nUVS8TJs2jWHDhqW+9vb2ZuHCha/cLzAwEK1Wi7+/f7r9zZJMNqurWrUoxYp5EBubwP791xU1TRc6Wr/+DBqNZXotXF1zMWpUCwBmzdpFQkKSiS2SSCQ6Zs7cwblz93B1zcX06T3MZkhhVqHRaJg/X4w5+PDDJsbNDFKIJUuOAGK+UtGi7pk/wM6/QJMMVRrpBzEmJcK2BWLZjL0uoKB4CQgIQK1W4+bm9sJ7fn5+Sp02+zE0Yfr6CZG9bQCVSpXqfdm+/aKiprVtWxVX11wEBYWmDu2yRAYPbkyBArkJCgpl2bKMq7kkEkn2ceDA9dT5RT//3JvChd1Ma5ACbN58nmvXHuHsbE///uaTqBsREcu6dacA+OCDxpk/QHIy7DSQqHt4PYQ+ErP7GptpxCMFRcWLIdzc3FCr1S/dd+3atfj6+rJw4UImTZr00m3j4+OJiIhI98hWdOIl6DrkKSJyYGKj9JM4DdC+fXUA/PyuEBeXqJhpuXLZ0b17bQCWL7fci76jox0ff9wagJ9+2snTp5EmtkgiebsJDo5g3LiVAAwc2DD1hiwnER+fxIwZ2wF9+NpcWLfuFDExCZQtW4CGDY0YDnl6l2iw6uIBTdKIFF2i7nsjwNYua4xViGzPPPLw8CA0NDTD9728vPD29qZ79+4MGzaM0qVL06NHjwy3nzZtGq6urqmPYsWKKWF2xrjlg8IpjYEuHhS18vDS0FHNmsUpVMiVqKh4Dh26oah5uruFf/+9wuPHhqugLIG+fetRtWpRIiLi+OGHLaY2RyJ5a0lISGLEiGU8exZFxYqFmDKlo6lNUoTly49x714oBQrkZtiwpqY2JxWNRsPff4vrywcfNDIuVKdL1G05EOwcxPLNM6JPmRmXR6fltcWLr68vPXr0eOXj7NmzLz3Oy4QLgKenJ56enqmve/bsia+vb4bemsmTJxMeHp76CAoKet0fKeto2FU871gIlVJCR6d3Z7i5lZUVbduKO5UdO5QNHZUrV5C6dUuRnKxJbW1tiVhbWzF9endUKhXr1p1OnaAqkUiylylTNvLff/44O9vzxx8DyZXLvO/QjSEyMo7ffvsXgE8+aY2jo72JLdKzfftF/P2DcXFxSPWsZ4rgIDgpPEq0S5NTqvO6NOkpwkZmzmsPnujevXumqn7SCpC0qNXqDN8DIZLSnkeXMxMQEICX14uZ7Pb29tjbm/iD9d5w8P1JuOJ0SU4nt8OjQChUyvAu71Vj8eLD7N59mYSEJEUnr/bv34CTJwNZufI/xo71NqtSv8xQs2YJ+vevz7Jlx5g82Zd///00R02slUjMnaVLj7Js2TFUKhXz5/enbFnz7AHypsyfv4/Q0GhKl85vVsMlNRoNv/wiboyHDm2Ks7ND5g+y6y/RnK5aUyguKlIJC4YDq8Vyp7FZZK2yKHYV8/T0xM3NzWDui7e3t8F91Go1PXr0SLePzuPyMsFjcgqXhtptxPL5fVC7tej3snVehrvUretJvnwuhIfHsnfvNUXNe++96ri7O/LgQRgHDihb4aQ0n3/+HnnyOHPz5hMWLNhvanMkkreGo0dv8dVXGwCYPPk9vL0rm9giZQgMfIqPzwFADJe0sbE2rUFp2Lr1AjduPMbVNRdDhxoRykpOgl1/iuW0oaGdi0R5dPm6UNF8xNrLUPQWfPLkyekqi3x9fdOVPgcEBKRrXOfm5sbEiRPTCZWFCxfSvXt3g1VLZkX7keJ592JoO1Qs7/oLYqMNbm5tbUWPHqKGfvVqZScnOzjYpp7rzz8PKXoupXFzc+Trr0WM/eefd3Hp0n0TWySR5HyuXHnA4MGLSUrS0LmzF6NHtzC1SYqg1WqZPNmXuLhEGjcuZ1bddJOT9V6XYcOa4eqaK/MHObkDnj0A17z6dIe05dEW4nUBhcXLxIkTUavV+Pr64uvry6lTp/Dx8Ul938/PL91rEIJn5syZqY+QkBDWrVunpJlZQ912kL84RIZCTITwxkSpYd/KDHfp00co3H37rvHwoVpR8wYPboy1tRUHD97g4kUT5AVlId261aZdu2okJWkYM2YFsbEJpjZJIsmx3LsXwvvv+xAZGUe9eqX55ZfeOa6fi45
Nm85x6NBN7O1tUnPszIUtW85z69YT3NwcUzuPZxpdom6rD8AuJd3i6EYhaNwLQJOMi2PMDZXWUgffZEBERASurq6Eh4eTO3fu7D356mmw5AvhemveB/74CEpUhoWXIIN/gm7d5nL8uD+ffdaWjz5qpah5Y8euYP36M7RvX52FCwcpei6lCQmJ4t13ZxIcHMmHHzbhu++6mNokiSTH8exZJJ06zSYw8BmVKhVm/foxxt3xWwBqdQxNmkzj2bMoJk5sy4QJyn4fZ4bkZA3Nm8/g9u1gJk1qx/jxLTN/kCd3YUApkdKw5BYUSSmx/rgxXD4C/abAgG+z1vBMkpnrt2VmbporbYaIMrMbJ8GzOjg4wd0rcOFAhrv07Ss69K5e/Z/iXXDHjHkXENnqt249UfRcSpMnjzO//NIHEKGwgweVLTmXSN42wsKi6dvXh8DAZxQr5sGKFcNyrHABmDp1G8+eRVG2bAFGjjSvsNjy5ce4fTsYd3dHBg82oikdwM4/hXCp+a5euNw+J4SLtY3o7WJBSPGSlbjnh8Ypbrd9K8WwK9CXoBmgXbtquLrm4v79MMW74JYvX4jWraug1WpTW15bMi1aVGTgQFGaPm7cSovuYyORmBNhYdH06rWAy5cfkDevM6tWDadgQVdTm6UYhw7dYMWK4wDMmNEDe3vzqWIMCYli5sydAHz2WVtcXIyoMHpyFzb+JpbTJuqmlkf3gDyF3szQbEaKl6ym4yjxvH8VvNtPLB/fLD48BsiVy46uXWsByifuAowdKyq91q8/zf37YYqfT2m++qoDFSsW4unTSIYO/VvOPpJI3pDnhYuv72hKl85varMUIzQ0mgkTRJnwwIENqVevtIktSs/MmTtQq2OoVKkw/frVz/wBtFr4dajo/F65ITTqJtarn8K+VWLZzOcYGUKKl6ymUgPwrAbxsSJ8VNNb1NRvnZ/hLrrQ0a5dlwkJiVLUPC+vEjRqVJakJA0+PpZfauzoaM+ff35A7twOnDlzh6+/3mRqkyQSiyUkJCpVuOTJ48y6daMoV878G5YZi1ar5bPP1vD4cThlyuTnq6/Mq1vwxYtBrFghbmp/+KGrcWXbu/6Cs3tEJ91PFoNVymV/2wJIjIdytfVjbiwIKV6yGpUK2qd4X7bOh05jxPLOP4WgMUDlykWoUaM4iYnJrF6tfBdcXe7LypX/KV7llB2UKpWPuXOFl2vp0qOsXXvSxBZJJJbH/fthdO48J1W4+PqOonx5ywolZJbVq0+wc+clbG2tmTu3H46O5tMtWKvV8r//bUCr1dK5s5dxHqHgIPD5RCwP+gGKlhPLd67A6qliudvHGRaUmDNSvCjBu++Dows8uCXUboGSooRa56IzgC53Y/Hiw8THKxv6aNy4HO+840lcXCKzZu1U9FzZhbd3ZT75RAxv/PxzX86dMxymk0gkL3LjxiM6dfodf/9gChd2Y8OGMTleuAQEPGXKlI0ATJzYlmrVsnku3itYv/40p0/fwdHRjq++6pD5A2i18Nsw0bqjYj3oMkGsT0yAmf2F1+Wd96BZ7yy1O7uQ4kUJcjnrk3W3+0DH0WJ582zxgTJAly5eFCzoyuPH4WzceEZR81QqVap7dO3aU1y79lDR82UXH33UCm/vSsTFJdK//yICA5+a2iSJxOw5ffoOXbrM5dGjcMqWLcDmzeNybNt/HTExCYwYsZSYmAQaNizDyJHNTW1SOoKDI1JD4OPHt6RQIbfMH2TPUjGyxtYePlkC1ikhpxXfiiqj3Hngoz8t0usCUrwoh67j7vHNYlyAvSMEXIRLhw1ubmdnk9ruef78fYqXTXt5laBDh+potVp++GGroufKLqysrJg/vz9VqxYlNDSa999fyLNnkaY2SyIxWzZvPkfPnvNRq2OoVasEGzeOpUgRd1ObpSharZaJE9emhsd+//19rKzM51Io8nDWEhYWQ5UqRRgxwghh9ewBLJgglgd+p59hdOUYrJkulsf7WMQAxowwn79YTqNkZTH4SqOBw776yqOXlE3361cfFxcHbt8OZs+eq4qbOHlye2xtrdm//zqHDuWMPinOzg4sXz6U4sU9uHPnGf37LyI6Ot7UZkkkZoVWq+Wnn3YxcuQy4uIS8fauxJo1I/HwcDK1aYrz11+H2LDhDNbWVvj4DKRwYTdTm5SOdetOsWfPFWxtrfn9977Y2mYySVerhd+HQ3S4aJja7WOxPjYKZg0Q1yTvAdC4W9Ybn41I8aIkHVISd3cshPYpDYCObhRJVAZwcXFgwIAGANnSh6Vkybyp5/v++62Ke3uyi/z5c7Ny5XDc3Z24cCGI4cOXkpiYbGqzJBKzIDY2gREjlqXOyRk+vBlLlgzB0dHexJYpz7Fjt/n22y0AfP11Rxo0KGNii9Lz4EFYah7Op5+2oWLFwpk/yN4VcGI72NrBp0tEAzqAhZ/CQ38xxmb07Cy02jRI8aIkDToLt1zoY3h4G6o3A02yfgiWAYYMaYKdnTWnTgVy8uSLE7mzmgkTWuHi4sCVKw/YsEHZXJvspHTp/Cxb9iEODrbs23eNESOkgJFI7t0LoXPnOWzdeh4bGyt++qkXX3/dCWvrnH8puH8/jOHDl5KcrKFbt1oMGWLkfCCF0Gq1fPrpGiIi4vDyKmFcHk7II5if0rOl3zdQopJYPrFdP9fo07/ByfIbDub8T6wpsbXTT5jeOh86pXyodizMsGy6YEFXunWrDcCCBcr3YcmTxzm1cd20aduJjIxT/JzZRa1aJVm8eDD29jbs3HlJChjJW82//16mdeufuXTpPu7uTqxZMzK1x1ROJyIilkGD/iQkJIoqVYowY0ZPsxq6CKLNw8GDN3BwsOW33/pmvqeLVguzR4iBwGVrQc/PxPrwZ/DLELHc9SOoYV7JycYixYvStBsGVtZivlGRMsJlFxECB/7JcBddgtbu3ZezZQbRkCGNKVUqL48ehTNjxg7Fz5edNGtWQQoYyVtNUlIyU6duY9CgvwgPj6VmzeLs3v0J9eubV8hEKRISkvjwwyVcvfqQfPlcWLx4sFn1cwG4cOEe33yzCYDJk9+jTBkjOhrvXw3Ht4j5erpwkVYLv4+AsCfCCzN4atYabkKkeFGafEWhfkrXxh0L9Xkwm+dkWDZdtmwB2rSpCsBPP+1S3MRcueyYNq07AEuWHMlxPVKaN6/I4sWDsbOzThUwcoyA5G3gwYMwevf+g7lz9wIweHBjNm4cS9GiObuiSIdGo+Hjj//hyJFbODnZs3z5UIoW9TC1WekIC4tOGW2STOvWVfjwQyPCWaGPYd5Ysdz3Kyglrh/sXQFH1gtBM2mF6DuWQ5DiJTvQCZY9S0VDIDsHUWd/9ViGu3z6aRtUKhVbt57n4kXDCb5ZSZMm5enWrVZqmV5O804IATMkVcAMHPgnUVE5J0QmkTzP5s3n8PaexbFjt3FysmfBggH88ENX7OzMZ+ig0kybtp0NG85gY2PFokWDzK4RnUajYfz4Vdy/H0aJEnn47be+mQ9nabUwZ5RohFq6BvT+XKwPvgdzUzq89/8GytTMStNNjhQv2UGNFl
CkLMREwqld0OJ9sX5TxhnflSoVpmtXL0CMas8OvvmmM+7ujly9+pBFiw5myzmzkxYtKvL33x+SK5cdBw/eoGvXuQQHR5jaLIkkS4mIiGXcuJWMHLmM8PBYatQQYaJOnXLWxetVLF58mHnzRNXmrFm9aNasgoktepF58/bh53cVe3sbFi4chKtrrswf5OBaUcVqbSOScW1sRTn0rEGiu26l+tBzYlabbnKkeMkOrKz0Teu2zYdOKe69w+tFM6EM+OyzttjaWnPo0E0OH76puJl58jindt796add3LsXovg5s5tmzSqwfv1o8uRx5vLlB3ToIFqiSyQ5gSNHbtGy5U/4+p7GykrFhAmt2Lx5HJ6e+UxtWrayevUJ/ve/DYBo/d+rV10TW/QiR4/eSs0x/PHHblStWjTzBwkLhnkp3pU+X0Lp6mJ54+9wYT84OMFny/Tl0jkIKV6yi1aDwD6X6LIbFw1VGqeUTf+R4S7Fi+ehf3/Rh2Xq1G1oM8iRyUp69apLgwZliItLZPJk32w5Z3ZTo0Zxtm4dT8mSeQkKCqVjx9mcPn3H1GZJJEYTHh7Lp5+uoWfP+QQFhVK8uAcbN45l4sS2mW9yZuH4+p7i00/XAKIYYfz4lia26EXu3n3G8OFL0Wi09OxZlz59jJzqPG+MqCbyrAZ9vhDr7lyBxZPF8vBfRKFIDkSKl+zCxR2a9RHLW+dD5xTvy3YfSMg492LChJY4Otpx4UIQ27dfUNxMlUrF9Ok9sLMTnXdXrVJ+yrUpKFkyL1u2jKN69WKEhUXTvftcVq36z9RmSSSZZteuSzRrNj318ztwYEP27PmMOnVKmdiy7GfjxrNMmLAarVbLwIEN+e67LmZXEh0WFk2/fosIDY2matWiTJ3azTgbD/nCoXWimvWTJaI1R2ICzOinH7rYbmjW/wBmghQv2YkucffwOqjcEPIWhfCnImaZAXnzujB8eDMApk/fQVKS8om0ZcrkZ+LEdgBMmbIxx4ZV8uZ1Yf360bRtW5WEhGQ+/XQNkyatVXyqt0SSFTx8qGb48L8ZPHgxT55E4OmZjw0bxjBtWndcXHJOVcnrsnXrecaNW4lGo+X99+vx449dzU646Mq2ddO7ly790Liy7Vtn9b1bek+GsiI/khXfgv95ix+6+DpI8ZKdlKslZk0kJsC/f0OHlDyYTRlPmwbR98XDw4mAgKesWXMyW0wdMaIZjRqVJTY2gdGjl+fY0mJHR3sWLRrE55+3Q6VSsXz5cbp3n8vjx+GmNk0iMUhCQhJz5vjRuPE0tm69gLW1FWPGvMuePZ9Sr15pU5tnErZtO8/o0ctJTtbQs2ddZszoYVbDFkE3cHENx4/74+wsyrYLFjSi0+3dqzC5lUjGrdIY+v5PrE87dHHCQoseuvg6mNdf922gU0py1ZoZUPc9Ma781hnRvjkDXFwcmDBBxG1nzdpJRITh7rxZiZWVFb//3hd3d0cuXrzPrFk7FT+nqbCysmLcuJYsXz4UV9dcnDlzl9atf+bIkVumNk0iSceBA9dp0WIm06ZtJzY2gdq1S7Jz58d88UV7cuUyr8Zr2cXKlf8xYsQykpJE2/+ff+5ldsIF4Ndf/2XdutNYW1uxcOEg4+YWPQqAz1uKRqflasP328DOXgxhTDt0sVHXrP8BzAzz+wvndJr3Fd6XmAhY+Z2+8mj2SIjOuGy3f/+GeHrmIzg4Mlsa1wEUKuTGTz/1AmD+/P0cPZqzL+YtWlRk586PqVixEE+fRtKr1wK++26LDCNJTM6tW0/44IO/6NvXh4CAp+TL58Lvv/dl8+ZxVKlSxNTmmYz58/fx2Wdr0Gi09OtXn99+62uWc5rWrj2Z+r09bVo348q2nz2ASd4Q8hBKVIapu8Apt2jB8WXbHDV08XUwv79yTsfaGj5aJErXjmwQscpCnvDsPvz1eYa72dvb8MMPQk0vXnyYy5czLrHOStq2rUbfvvXQarWMG7eS0NDobDmvqShZMi9bt46nX7/6aLVa/vhjP+3b/8bNm49NbZrkLSQ4OILPP19HixYz2b37MtbWVgwd2oTDhyfTo0cds8vpyC60Wi1Tp27jhx+2AjB6dAtmzOhhlsJl+/YLfPyxGAczcmRz+vVrkPmDqJ8Kj8vjQChcGqbvEXktsdHwv/fg6nFRFPLt5hwxdPF1ML+/9NuAZzXo/qlYXvQZjPhVLG9bABcPZbhbs2YV6NChBhqNlsmT16HRaLLBWPjuu854eubj0aNwPvtsTY4sn06Lo6M9M2f2ZPHiwbi7O3HlygPatPmFxYsP5/ifXWIeREfH8/PPu2jQ4EeWLTtGcrKGVq2qsG/fRL79tgu5cxvRzCyHkJysYdKkdakjD774oj1fftnBLIXc3r1XGTVqORqNll696vLll+0zf5DocPiiNdy7BnmLwHQ/yFMI4mJgSnu4fFgIlml7RIfdtwSVNod9G0dERODq6kp4eDi5c+c2tTkZEx8Lw6sKV1+nsaJceuci0Yn3jwuiJ4wBHj8Op0mTaURFxTNrVi/efz97psJevBhEhw6/k5iYzOTJ76VOos7pPHkSzscf/8P+/dcBqFevNDNm9KBs2QImtkySE4mJiWfp0mPMn7+PkJAoAGrWLM5XX3V8a5Nx0xIdHc/Ikcvw87uKSqVi5swevP9+fVObZZAjR27Rv/9C4uOT6NixJvPm9cu8Zyg2WgiXK0fBNR/8fAiKVxDXjykd4ZwfOLoI4VLRyF4xZkRmrt/S82Iq7HPBuJQGdVvmQuPukKcwPLglyt0yoGBBVz77rC0AU6duTf2CU5pq1Yrx/fddAFGyvW/ftWw5r6kpUMCVFSuG8f33XciVy47//vOnZctZ/PTTLuLiEk1tniSHEBOTgI/PAerV+4Hvv99CSEgUJUvmZcGCAWzbNkEKF0RpeOfOc1Lb6fv4DDBb4XLqVCCDBv1JfHwSrVpVYc6c9zMvXBLi4dsuQrg4ucL0f4VwSYiDb7sK4ZLLGX7clSOES2aRnhdTM3Mg+C0TU0D7fQ3fdxdNh2afEKXVBkhKSqZNm1+4evUhffq8w88/9842cydOXMuKFcfJnduBHTs+fqvajgcFhfLFF77s3SuEm6dnPmbO7EmDBjmzg6VEeaKi4lix4jgLFuzn6dNIAIoX92DChFZ061b7reuOmxEXLwYxaNBfPH4cTt68zixZMoRatUqa2iyDXLwYRM+e84mIiKNJk3L8/feHODjYZu4gyUnwfQ84tkm0+J++R8woSkyA77qK6lR7R5G0W7WxIj+HKcjM9VuKF1MT/gyGVBClb4OniQZDB9eAZ3WYe0oM2TLAqVOBdOoksso3bx6Xbd004+OT6NFjHqdP36Fs2QJs2zbhrWqIpdVq2bbtAl99tYHgYHGx6dzZi8mT36NYMQ8TWyexFIKDI/jrr8MsXXqEiAjRYbtYMSFauneXoiUtO3deYsyYFcTGJlCuXAGWLx9mtv9rp04F0r//QiIi4njnHU9Wrhye+SZ0Gg3MGgh7V4hWGj9sh5rvQlIi/NBTCBr7XPD9dqjRXJGfw1SYlXhRq9WsXbuWd
evWsWfPntfaZ+bMmbi5uaXuP3Hi60/EtDjxArBnmfiw2jnAjL0wpYMYb/7Bj/p5FQb45JN/WL36BBUrFmLHjo+xt8+e4VtPnoTTtu2vPH4cTuvWVfjrrw/Msq+CkoSHxzJ9+naWLTuGVqvFzs6aIUOaMG5cS+Mmw0reCvz9g/njj/2sW3eKhATRLbt06fyMHNmcHj3qSNGSBq1Wy9y5e5k+fQdarZamTcvj4zPQbJOVDx++yQcf/EVMTAJ165Zi+fJhmb+x02phzmhRvGFlDV9vhPodhCdmah847CsEzffbwCvn5R2ajXg5e/Ysp0+fRq1Ws2bNGs6cOfPKfWbOnAmQKlj8/PxYt24dPj4+r3VOixQvWq0ogzu3VyjslgNh5gAxq2LBBRHnNEBISBRNm04nNDSa0aNb8OWXHbLN5HPn7tKlyxwSEpL5+OPWfPppm2w7tzlx6dJ9vv9+S2pDO3d3Rz76qDUDBjTAzi7nTXKVZJ7kZA37919jyZIjqYnfALVqlWT06Ba0alX5rRP/ryIiIpYJE1aza9clAAYMaMAPP3TFxsY8xd2//15m+PClxMcn0aRJORYvHoyjo33mDqLVinYZa2eKtv6fr4TmfYRwmdEfDvwjrgnfbIY6OfP71mzEiw5fX1+mTZv2WuLF3d2dwMDAVM8LiGGBr2umRYoXgAe3RfVRQhx8tlR8UE/thEoN4JfDkMGX286dFxkyZAkqlYoNG8bwzjue2WbyP/+cSO1fMHduP7p2NZyjk9PRarXs33+d777bzM2bTwCRtzBmzLv06FE32zxiEvMiLCyaf/45ybJlR7l7NwQQ32Xe3pUYNapFtv6vWhI3bjxiyJAlBAQ8xc7Omh9/7EbfvvXMshQaYPPmc4wdu4KkJA1t2lRlwYIBxv3Pr54KS74UyxMWiqGKycnw0yARQrKxhSkboJ4R5dYWgsVWGwUEBKBWq9MJFx1+fn7Zb1B2UqSMSNgF8PkYBv0gMsmvHhNTqDOgbdtq9OxZF61Wy/jxK4mKynhCdVbTu/c7DBvWFIAJE1Zx4MD1V+yRM1GpVLRoURE/v8+YNasn+fO7cO9eKBMnrqNBgx/5669DxMYmmNpMSTag1Wr57z9/JkxYRa1a3/L991u4ezcEV9dcDB/ejKNHv2Dp0g+lcMmAzZvP0a7dbwQEPKVwYTc2bhzL++/XN1vhsnr1CUaNWk5SkoYuXbzw8RlonHDZNEcvXIb9LISLRgO/fiiEi5U1fLEmRwuXzGJ24sUQbm5uqNXq7DXGFHT/RFQdRYTAxt9hyAyx/q/P4cndDHf7/vsuFC3qzr17oXz99abssTWFKVM60rmzF0lJGj78cAnnzmVsZ07Hxsaa99+vz7Fj/+O77zpTsKArjx6p+eqrjdSr9wPz5+8jMjL7xKUk+3j4UM3vv++hYcOpdO06l7VrTxEXl0jlykX46ade/L+9846rqvzj+JuNyFZEFEWGiltx5MqJs9RK1ByV/nI2NBuamWnT0IZmDshyZBqKWpalgiv3wIkbcIGDeQHZ4/z+eLj3iooCMu6F5/16nde9Z97nuefecz7n+a6QkDnMnj2IevWql3dTdZLMzGxmz97MpEmrSUvLpHPn+mzf/h6tWrmUd9MeiaIofP/9Dt5773cURZQm+OGHkcXzWdqxEpZMFu9HzQafd4VwWThRrDM0go/WQecXS7ILeo9OiZeCsLe3Jz4+/pHrMjIySEpKyjfpLcYm8M5Pwt4ZvBpqeUDTzpCeAgsnFFh52srKnIULR2BgYMC6dUfYvj20zJpsaGjIggXD6dKlAampmYwa9RNhYdFl9vm6iIWFKWPHduXQoY/5+msfnJ3tiIlJ5osv/sLLaw4ffbSRK1fulnczJU9JcnI6gYHHGDHCj3btPsPX9x+uXYulalUzRoxoz5YtU9ix4z1GjGhf9IiTSkR4eDQDBizkp59EdvG33urJ2rUTqFbNspxb9miysnJ4993fNcVq33jjKUoT/LcBvntdvH9pKrwyW1znf3xLJC01NITpv0KXISXYg4pBoce3AgMDCQgIeOJ2M2bMwMvL66ka9SAFCReAuXPn8umnBSd10zsaPQMD34I/F8GiSfDJRpjcHo5vh+Bfoderj9ytQwcPJkzoxrJlu/nggwBat3ahenWrMmmyqakxy5ePYciQJZw+fZPhw5exZctknJxsy+TzdRUzM2NefbUTw4e3Z9OmEBYv3klYWDQrV+5n5cr9dO5cnzFjnqV37yY6WZNF8jBpaZns3HmeP/88yc6dF/IlKmzf3p2XX27H88+3KLqzZiVEURQCAo7y8cebSE3NxM7Ogm+/fZm+fZuVd9MKJCkpjXHjVrJv32UMDQ348svBvPZap+Id7L8N8PVIMcrSbyxM+FYsX/qOiDYyMID3VwqnXclD6JTDbkREBO7u7g855xoYGBAUFIS398OhYRkZGWRkZGjmk5KSqFOnjv457N5PajKMbSyKNQ77UGRX/GWGKLy1/ALYPTo1fXp6Fv37f8/Fi7fp06cpv/zyvzK1FcfF3WPQoB+IiIjB09OJTZvewtbWosw+X9dRFIX9+6+wYsV+duwIJTdX/M5r17bDx6cNgwe3wcOjRjm3UvIg9+6ls3v3RbZtO8uOHedISdFeb9zcHBg0qBU+Pm1wda08CRuflsTENKZP38CWLScB6NTJgx9+GKnTDzyRkQm88oo/ly7dwcLCFD+/1+jZs3HRD5STA6tmwe9zxXzXYSKyyNBQ1LoLzBMx7/4Mff9Xch3QA/Q+2igkJAQ3N61DW6WINnqQg3/CnBeEvXPRUeG4FXZSDB9+vL7A3c6di6J//+/Jysop09pHam7ejGfgwIXcvZtEmzb1+O23CZUqiV1hiYxM4NdfD/Lbb4fyVepu2bIugwe3ZtCgVmU2ciZ5mNu3VezYcY4dO0I5cOCKJicLgLOzHYMGtWLQIC+aNKmls86kusrRoxG89dYaIiMTMDIyZNq0frzxRg+dHn08c+Ymr722nLt3k3B0tGb16nE0a+Zc9AMlJ8DcEXB8m5j3eQ9e/1pc53/5CAK+Fsun+MFz40uuA3qCzokXf39//Pz8HhIvERERBAYG5ktCp05QN368OHGBgYEEBQVV7DwvBfHZYNi/CTyfgbcWw+RnIDdHhMs9xnlr8eKdfPnl35iZGfPnn5Np3rxOGTYazp+/xeDBP5KYmEbr1i6sWTNBJm4rgPT0LLZvD2XjxuPs3n2RnBxRKdzIyJBu3RrSt29zevdugoODFDKlSUZGNsePX+W//y6zZ89Fzp6NzLfezc2B3r2b0r9/c1q3dpGCpRikpWUyb96/+PvvRVEUXFyqsXjxK3h56aZTrppNm0J4//0A0tOz8PR04tdfx1G7tl3RD3TtHMwZJIrxmlWBqcuhxwhRw2jpFNiad49760cY+GbJdkJP0BnxohYnAQEBnDhxgmnTptG2bVt8fHwAIWp8fX0JDw/Pt9+8efM0Iy/Hjh3D19e30J9ZocRLbJQwH6UmwZuLIO6WGGq0rwk/nRdmpEeQm5vL6NE/Exx8njp17Pn333ex
t69apk0/c0b4viQkpNKiRR3Wrp2AnV3ZtkHfiI1N5o8/TrJx43FOn76pWW5gYICXlwu9ezehb99meHjUkDfPpyQ3N5fLl++yb99l9u69xKFD4fnC2dXfeZ8+TenTp6msIv6UHD9+jXfeWUtERAwAQ4e25fPPX9LpUdns7By+/PJv/Pz2ANC9uydLl75avAy/+zaKLOrpKeDoIjLnerSC6BuihtGlo8LHZeICeHFyifZDn9AZ8VIeVCjxAvDXUlj0hsj5suQkzHoOIi9D39fh3eUF7paYmEa/ft9x7Vos3bp58uuv48p8WPbcuSiGDVtKfHwKTZrU5vffJ+psBIGuceXKXbZuPc327aH5hAyAq2t1OneuT8eO9enY0UOOyhSCjIxszpy5ydGjERw5EsHx49dQqVLzbePgYEXXrg3p0qUBXbt6yu+1BEhLy2T+/G34++8hN1ehZk0b5s0bgrd3k/Ju2mOJi7vHpEmrNZmzJ0/25oMP+hX9GpqTA6tnw7ovxXzLHjAzAGyqw4lg+OplkRrDyg4+XFthM+cWFileKpJ4yc2Fd58Vyeo6DIQhH4h5AN9gUU6gAM6di2LAgIWkp2cxdWpvPvigXxk1WsvFi7cZOnQJsbH3aNTIiYCASdKXo4g8zv8CoGHDmnTs6EGnTvXx8nKhZk2bcmqpbpCbm0t4eAxnz0Zy5sxNTp8W0/2RQQBVqpjStm09unXzpEuXhjRq5CRHtEqQ48evMXXqOsLDReqEoUPbMmfOCzrvxH/mzE1ef30FUVEJWFiYsnDhCJ57rkXRD3RPJaKJjv4j5l+aCuPmgYGh8G1ZNUtc3z28RFRpzXol2Q29RIqXiiReQNhK32glqop+shFO7YIti6GmK/idhSoFm2M2bjzO22//BsCqVWPp1avsn3iuXLnL0KFLuHs3ifr1HVm/fhKOjpX7BltckpPTOXgwjIMHr3DgQBjnz996aBtHR2uaN69Dy5Z1adGiDi1a1KmwI15JSWlcuXKXS5fucOnSbc6ciSQ0NCpfRJCaatUsadfOlXbt3HjmGTeaNKktCyGWAipVKl9/vZVffz2Eoig4Olozb97Qcrn2FJUNG44xffoG0tOzcHNz4Oefx9CwoVPRD3T9vAi4iLoiCu6+8xN4jxKCZt6rcPgvsV3f14WPi6nums/KEileKpp4AVg5C9Z+AfZOIvronY4Qc1Oo+YnfPXbXmTM3smLFfqytzfn333fLJaQzIiKGIUOWcPu2Cjc3B9asGS+zjZYAcXH3OHw4nAMHwjhyJJxLl+5oQrDvp0YNKzw8HKlf3xF39xp4eNSgfn1HatWy1fnRhtTUTCIj47lxI44bN+K5di2Wy5fvcOXKXW7fTnzkPubmJjRpUpsWLerQvLkzXl71cHd30Pm+6jOKorB58wnmzPmD2Nh7gP6MtqSkZDBz5kbWrz8GgLd3YxYtGlW8QIMDf8C8VyDtHtSoK/xb6ntB+Gn4fLBw2DUxE0EY/V4v2Y7oOVK8VETxkpkOE5oLJf/8JGFCmtlPOHktOAiNCg6JzszMZvDgxYSEXKNx41ps2TKlXDJ+Xr8ey5AhS4iMTMDeviqrVo2ldet6Zd6OikxqagahoVGcOXOTU6eEuUQ9bP8ozMyMcXKypVYt7eTkZIuTkw329pbY2FTBzs4CGxuLEh2lyM3NJSUlk8TENGJjk4mJEZP6fXR0MlFRCdy8GU9MTPJjj+XkZEP9+o40aFCTpk1r07x5HTw8auhsBeKKSHh4NDNmBGp8RDw8avD110Po2NGjnFv2ZM6di2LixNWEh0djaGjA1Km9mTq1d9Erfefmwq9z4LfPxXyLbjBzPdg6QNBq+GEiZKSBYz2YFQgNKmch28chxUtFFC8Ap3bDtB5CsHy3X4TWBa8Gl8aw+ASYFpzV8/ZtFX36fEts7D0GDWrF4sWjiv7nLAHu3k3k1VeXc/ZsJObmJixaNLJ49mRJoUlOTic8PJorV+4SFhZNWNhdrly5y7VrsWRn5xb6OFWrmmFjU4WqVc0wNTXGzMwYU1NjTE2NMDU1xsTEiNxchZycXHJycsnOziU3N5ecHIW0tEySk9NJScng3r2MR5p1HoeVlTl16thTt241XFyq0aBBTRo0ECNJxYr+kJQIaWmZLFmyi0WLgsnMzMHc3IQpU3oxcWJ3na+mrigKq1Yd4NNP/yQjI5uaNW1YvHgUHToUQ3ClJMLXo+DI32L+xSkwbr4QNMumioy5AG36ioR01vYl15EKhBQvFVW8AHz7P9i+AlyaCIfdiS1AFQ2jPoFXH18m4dChMIYNW0p2di5vvtmDmTMHlFGj85OSksGkSasJDj6PgYEBn3wykPHju8oh/TImKyuH27dV3LqVf7p9W0wqVSqJiWkkJqaVWhtMTIyoXt0KBwdLHBys8t6LqVYtW41g0XWzQ2VDURT++usUX3zxF5GRCQB07dqQuXN99MIcrFKl8v77AfzzzxlAmIm+/3548XzDblwU/i2Rl4Q5aOpP4P0KRN+Ez320YdAjPxHX6XJ4aNQXpHipyOIlKQ5ebwSJMTBylqhC/cVQMDKGb/ZAk8fX2Vi//ijvvLMOgK++Gszo0Z3LoNEPk52dw6xZm1m16gAAo0d35rPPXpBD/TpITk4uSUlpqFSpqFSppKZmkpmZTUZGNpmZ2Xnvc8jOzsHIyAAjIyOMjAwwNjbC0NAAIyNDqlQxwdLSnKpVzbCyMsfS0gxLS3PMzIylaNUzzpy5yezZf3DkSAQATk62fPLJQAYObKkX5/Lw4XAmT/6NyMgETEyMmDnzecaNK+bD06Et4DtKlHRxqCP8Wxq0hpM7RRh0YqwIg57+G7Qr+2hPfUOKl4osXgB2rxMppkEU7jryN+wLFDWQvtkD7i0fu/uCBTuYN+9fDA0NWL58TLkVQlMUBX//vXz22RYURcHbuzFLl75K1aqyqJ1EomtERyfx9df/EBBwFEVRMDc34c03ezBpUg+9qJqdnp6Fr+8/mgy/9epVZ+nSV2jRom7RD5abC2s+gzV5o93Nuwr/FpvqEOALqz7WhkHPCgQn15LtTAVFipeKLl4A/D+AwG9ETYwZa0UV6tD9YOMA3+8H5wYF7qooCtOmree33w5jbm7Chg1vlKvj7Natp3n77d9IT8+iQQNHfvppjMxoKpHoCKmpmfz8838sWhTMvXvCV+nFF7346KPni5cmvxw4ffoGkyev5cqVuwAMH/4Mc+a8ULwMvylJIpro0BYxP+htURE6PUVk0VUvl2HQRUaKl8ogXnJz4ZsxwmHX1Fzkf1n5sSje6FAHvj8ANQquaZSdncOYMT+zc+cF7O2rsmXLFNzcyq8qbkjINcaOXcHdu0lUrWrGt98OY+DAVuXWHomkspOVlcPvvx/hu++2c/duEiAKh3722Yu0aVOvfBtXSLKycliwYAc//BBMTk4uNWpYMX/+sOLnnAk/BV8Nh5sXhX/LlGXQezREnBG16G6F5YVB/wj9xpZkVyoFUrxUBvECImndpy/Cka1gaQuzNsKiSaJ8gHND+PY/sKtR4O4pKRn4+Czm9Omb1Kt
XnS1bJpdr9tvo6CTeeONXDh4MA+D1159l1qyBmJrqdtSCRFKRyM3N5a+/TjNv3j9cvRoLQJ069nzwQV9eeql1uUQpFoeLF28zefJvhIZGATBgQEvmzvUpXp239FRhIgr8VhTHre4MszdBw7YQvAYWjs8Lg3YR12EZBl0spHipLOIFxJ/qw16ifEC1WqJuxtwRIoGdRyuYv1v4whRATEwyAwYs4MaNeFq2rEtg4BtYWJSfz0l2dg7z529j0aJgAFq3dmHZstf0ZnhaItFXFEVh795LzJ27VVNVu1o1S955pxejRnXU+dBnNRkZ2SxevJMffggiMzMHOzsLvvrKh0GDijmSe3KXECe38goIdxkCb/4oHhiXTYW/lojlbfrCh2vAulqJ9KMyIsVLZRIvAEnx8F4XuH5O+Lq89wvMeVFEJDXtDF9tB/OCQ03DwqIZNGghCQmpdO5cn5Urx5a7A96OHaFMmbKWxMQ07O2rsnjxK3Tt2rBc2ySRVEQUReHgwTC++247hw6JG7SlpRmTJvVg3LguWFrqj8/G0aMRfPDBeo1vi7d3Y+bPH1q8ciRJ8fDT+yI1BUD12vD2EpEgNPwUfD8OLh/XhkGPnAVGMlryaZDipbKJF4DYKFEyIPoGNGgDE7+HWc+L5Elt+8GcP8CkYEESEnKNl19eRkpKBs8+24CVK1+nSpXyFTDXr8cyfvwqzp6NxMDAgIkTu/HBB/0wNzcp13ZJJBUBRVH477/LfP/9do4evQqAqakRo0d35u23vfWqHlZSUhpfffU3q1cfBKB6dUs+//yl4oVvKwr8twEWvy1yaBkYiKzm/5srcrT8Ogc2LRDmIys7mL4G2vUv8T5VRqR4qYziBeDmJXi3s8gt0Mobhn8Es54TttiuQ0XJ9cc8GRw9GsGIEX6kpmbqjIBJT8/ik082s2bNIQAaNHDkhx9G0rx5wc7IEomkYBRFYc+ei3z33Q5CQq4BokzEiBHtefPNntSqZVuu7Ssq//57hpkzN3Hnjqhz9fLLzzBr1gDs7Irh2xJ9Exa9oc2UW7eRSDrXpBMc+gt+fFOY5EFcUycugGrFKNwoeSRSvFRW8QJw6Rh80F2E7XUdBr1eFdkfs7Og3zh4x088SRTAkSMRjBwpBEyXLg1YsaL8BQzA9u2hTJu2npiYZIyMDJk82ZspU3pJZ16JpJDk5uYSHHyehQuDOHnyBiAKWI4a1YE33uhBzZr6Vek9MjKeTz75g23bzgLg5uaAr+8QOnWqX/SD5eSIFP6/zBAFFY1NYPhMGPahML8vmQwHNottHesJ85FMOlfiSPFSmcULQEiQGHHJzoJBb0GzLiLbY24uDPkAxvo+QcCEM3KkP6mpmXTr5skvv/xPJ0w1cXH3mDlzE1u2nASgadPaLFw4gkaNapVzyyQS3SUjI5tNm0JYtmy3xhfE3NyE117rxMSJ3YrnD1KOpKdnsXTpbhYtCiY9PQtjY0PeeKMHU6b0Kt6D1rVzsGAcnBejuzTuAFOXi4jNLYth5UwhaIyMwec94d/yGB9CSfGR4qWyixeA3b/D1yOE/fbVz8TQ5vfjxLoxX8HwGY/d/fDhcEaN0j0BA7Bly0lmzNhIQkIKJiZGvP9+XyZN6i5LC0gk95GYmMbq1Qf45Zd9mjwtVlbmvPJKRyZM6IaDQ/mlRSgOiqKwY8c55sz5g+vX4wBo396dL798qXgPMJkZ8PtX8Ptc8aBnYQX/+xqenyjyZS2cAFdCxLaNO8AUP1GORVJqSPEixYvgj0ViuBNg8jJIvwf+74v5t5fAgEmP3f3wYTECk5aWSffunvz8s+4ImOjoJKZN28COHaEANGlSm6++GkzbtjINt6RyExWVwPLl/7FmzSFN9W4nJxvGju3KyJHt9bIKd3h4NLNn/8GuXRcAqFnThk8+GcigQa2KV5Po3AH4bqxINgfQfoC4Jla1gVWzRMby3FwRDv2/r6H/OFlQsQyQ4kWKFy0rZ8HaL4SZ6OMN4oli3Zdifvoa6DHisbsfOhTGqFE/kZYmnHiXLx9TvJTapYCiKGzYcIzZs//QVD4eMqQNM2cOoEYNee4llQdFUThyJIIVK/bxzz9nycnJBcDT04lJk7ozaFArvfQPS0nJYOHCIPz89pCVlYOJiRETJnRjypRexauBlpIIP88Q/i0Ado7w5iJ41gcO/ikijGJFjhu6D4cJ34F9zZLrkOSxSPEixYsWRYGFE+EffxEq/cW/cGCTsOUaGokQ6vbPP/YQBw+G8eqrP5GamkmTJrVZs2acTtnJ4+LuMXfuVtatO4KiKFhamvH++30ZM+ZZTEykKUlScUlNzWDz5hOsWLGf8+dvaZZ37lyfSZO6062bp15Uen6Q7GxRmmD+/G3ExCQD0KNHIz799AXc3QvOGv5YDv4pIoni8r6nvq/DuPmQlixEi7omkZMbvL0U2vQugZ5IioIUL1K85CcnB74cCvs3CbvuvN2weQHsXCPqIn35L7To9thDnDlzk1Gj/ImNvYezsx2//TZB54onnjx5nZkzN3HqlIikaNDAkS+/HFy86AOJRIe5di2WVasO8PvvRzSjjlWqmDJ4cGtGj+5M48b66cSuKAq7dl3g88+3cPmycC6uV686c+YMolevJsUTYnG3hfl8X6CYr+UB7/hDs2dh8w+w+hMRnWlkDEOnwYiPwUz/TGsVASlepHh5mMx0mNkPTu8B2xrwzV5YPg0O/wVVLEUZgQZtHnuI69djGTnSn4iIGOzsLFix4nXatXMrm/YXktzcXNatO8pXX/1NQkIKAAMGtGD69OfKtfCkRPK0ZGZmExR0jrVrD7NnzyXUl+569aozenQnhg5th62t/kbBnD0byeefb2H//isA2NlZMHVqH159tWPxTF6KAtt+Fn5+KYlipHnIBzDqE7gWCgvGi0y5IDKRT14G9YpZsFFSIkjxIsXLo0lJgve7ij9sTVfw3QnfvQ6nd4t6HN/+By6NH3uIuLh7vPback6cuI65uQmLF4+iX7/mZdP+IpCQkML8+dtYvfoAubkKRkaGDB3alqlTe+PsbF/ezZNICs2VK3dZu/YwgYHHiYu7p1neo0cjxozpTPfunnpTLPFRREUl4Ov7Dxs3hqAoCqamRrz+ehcmT+6FjU0xR0CunAC/d+HMXjFfvzW8uxxquonQ5y2LhbixsoOx86HPGOmQqwNI8SLFS8Ek3IWpnUSRMbcW8Pnf8NlLIrldtVrw3X5wenzETmpqJm+88Ss7doRiYGDAF1+8xJgxncuoA0Xj3LkofH3/ITj4PAAmJkaMHNmBKVO8dcpvRyK5n5SUDLZsOcW6dYc5fvyaZrmjozVDh7bj5Zfb4eqq3yOJcXH3+PHHnaxadYD09CwAXnzRiw8/fI46dYr5gHHjojAD/bdBzJtZwGufw4uT4cAfsHSK1uel5ygY/y3YFdOHRlLiSPEixcvjuR0h6iAl3BUJ7GashRm94fp5qOUO3+57Ysrr7OwcPvpooyZt/1tv9WTGjOd01jnw2LGrzJ//r2ZI2tzchNGjO/Pmmz30qoaLpOKSk5PLgQNX2LTpBF
u3ntaEORsZGeLt3Zjhw5+hR49Gep/PSKVKxc9vDz/9tJfU1ExA5GuZPXsgLVrULd5B716HNZ9B0EoR4mxgAN1HCOFiYACL34IjW8W2tTxg8lLw8i6ZDklKDClepHh5MuGn4L2ukJoEHV+ANxbC+93gzlWo11T4xFg//ulHURQWLgxi3rx/AejXrxkLFozQmVDqR7F//xV8ff/R1HSpWtWM119/lv/971kZXi0pcxRF4ezZSDZtCuHPP09qkskBuLpWZ/jw9gwZ0qZCjBLeu5fOzz/vY9my3Ron4+bNnZk+vX/xo6IS7sK6r2DrMsgSQogOA4VocagDgd/Cpu8hI1Wk/H95hphMdfcaVZmR4kWKl8JxZi/M6ANZGSJs8OUZorBj/B3wfAZ8g4Uz7xMICDjK9OnryczMwd29Bj//PIYGDXQ3N4I6osHX9x9CQ6MAUU33xRdbM358V1luQFLqXL8ey+bNJ9i0KYSwsGjNcltbCwYMaMFLL7WmXTs3nR3JLAppaZmsXn2QRYuCiY8XTvSenk588EE/+vZtWrw+3lPBhm9E1GS6OCYtusP/voK6jWHzQtj4rXDUBTHCPGWZKLQo0VmkeJHipfDs3wxf+Iih1uEfQbfh8H4XSE6AVj3h861g+uRkUCdPXmfs2JXcvq3CwsKU778fzoABLUu//U+Boihs23aWJUt2a0ZiAJ59tgETJnSjW7eGeu0IKdEtrl6NYevWM/zzzxlNOD8IE2avXk146aXWdO/uqZfJ5B5Famomv/12iCVLdmlGlNzcHHjvvb4MHNgSI6Ni/LfSUkT22/W+QsAANGwrSp406gB/LYYAX0iOF+vqNRWjMB0HPbaem0Q3kOJFipei8e9ybd2jSQvAsz1M7ymeaDq9CB+vFzkQnkBc3D0mTVqt8SuZMKEbM2c+rxc2+pCQa/j772Xr1tPk5oq/hIdHDcaP78rgwW10orK2RL9QFIVLl+7wzz9CsNyfRM7Q0IBOnerz0kut6d+/uU6bWotKcnI6K1fux99/ryY6qnZtO959tw9DhrQp3vUgKxP+/UlkC4+/I5a5NIbRX0LbvrDVX9QpShC5YXBuCK/Mga5DZRSRHqFT4kWlUrF+/Xo2bNhAUFDQE7cPDg7Gz8+PXr164ebmRlBQEG3btsXHx6dQnyfFSzFZ9xWsmCneT18jUmJ/3F9cNLx6wfRfRSrtJ5CdncPXX//DkiW7AOjY0YNly16lenX9KAIXGRnPzz/vY+3awyQnpwMi38QLL3gxbFg7mjVzrhBD+ZLSITc3l9Onb7JtWyhbt54mIiJGs87IyJBOnTzo3785ffs2q3A+VgkJKSxf/h+//LJP49Pi4lKNt97qiY9PW8zMijGilJMjkmn+OgfuXhPLarrCq59ClyGwY5UQNOqU/jVdYdRs6DmyUA9cEt1CZ8TLiRMnOH78OCqVioCAAEJCQp64T2BgIOPGjUOlUuHm5sb06dMZP358oT9TipdioiiwbKqwFRsawmtfCPvw1yMgI00Il2m/QutehTrc1q2neeeddaSkZODkZIO//2hat65Xun0oQZKT0/n99yMsX/4fN2/Ga5Y3auTE0KHtGDy4td4IMknpolKlsnfvJXbtOs+uXRfz5WIxNTWiS5eGPPdcC3r3boKdXdVybGnpEBOTjJ/fHlatOqCJkPLwqMGUKb0YNKhV8UZaFAUObIaVH8MNUYwR+5owYpbIybI3QEQX3bkq1lV3hpF564x1o3ispOjojHhRExgYyNy5cwstXry9vbG1tS3WZ0nx8hTk5oo6SP/+JObb9IWXP4RFb8L1c2LZsOnChlyIC8SVK3d5/fVfCAuLxsTEiJkzn2fs2C565UeSk5PLvn2XCQg4yrZtZ8nIyAbA2NiQHj0aM2xYW3r2bFxh/BQkT0ZRFC5evE1w8Hl27brA8ePXNIUQASwtzejWzZN+/Zrj7d24QpmE7ufq1Rj8/fcSEHBUk6elceNavPNOL/r3b168/7miwIlgWPERXD4ullnZwbAPYcAbIiP4r3Mg8rJYZ+cIw2eKqs8ygkjvkeJFipfioyiwfQX8+KYoKVC9Nry3QhRz/HuZ2KZRe/hw7ROT2YEIj5w6dR1bt54BhBlpwYLhepnlVqVKZcuWkwQEHOXkSa3Dpb19Vfr2bUa/fs3o1Kk+5ubyya+iERWVwMGDYRw4cIV9+65w+7Yq3/r69R3p2bMxPXs2om1b1wotZo8du8qyZbvZti1UU6LAy8uFKVN64e3duPhm1fOHhGg5vUfMm1eFwe+K6dRukXzuWqhYZ11NK2jM9bckgiQ/ei9e4uPjsbe3Jz4+nvDwcHx9fQvcPiMjg4yMDM18UlISderUkeLlabl6Fr4YCjcvipogY74U1Va/HyfCDy2sRbrtLkOeeChFUViz5hBz5vxJWlomlpZmfP75Swwd2lZv/UcuX75DQMBRNm48TnR0smZ51apm9OjRiL59m9GzZyOsrWWBN30kNjZZI1YOHAjL57sCIkKoUycPevZsTI8ejahbt1o5tbRsyMnJZfv2UJYuzR+Z5+3dmAkTutGxo0fx/8sRZ4S/3ZG/xbyJKTz/hkjdcOU4rJwFYSfEuqo24PM+vDhFFJmVVCj0WrxEREQA4OYmCv75+/sTFBTEhg0bHrn9nDlz+PTTTx9aLsVLCZB2T5iRdv0m5tv1h1c/gyVvi6ckgP7jYeL3hXr6uXo1hilT1mrSnffp05T584fqte9IdnYOBw6EsW3bWbZvD+XOnUTNOhMTIzp29KBfv2Z4ezehVi3b8muopEAUReHGjThCQq5z7NhVjh6N4MKF2/m2MTQ0oEWLOnTqVJ9OnerTtq0rFhYVPwItNTWT9euP4u+/l2vXYgHhxzN4cBsmTOj2dPmcoq7A6tmwe52YNzQSPisjZ0FUGKz6WHudqWIJL74jRmGs7J6uUxKdpVTES2BgIAEBAU/cbsaMGXh5eT20b2HFy4OoVCrs7OxISEh4pClJjryUMooC234R6bUz04Vj3Idr4Ph2CPharHdpAjMDClWRNScnl6VLdzN//r9kZeVQrZol8+YN0cnijkXl/kiTbdvOcuXK3Xzr3dwc6NjRg44dPejUqT4ODvor2vSZtLRMzpyJJCTkGsePXyMk5BoxMckPbde4cS06d65Px44etG/vXqlG0W7ciGPVqgOsW3cElSoVEFF3r73WidGjOz9dpNTVUJH1NmgV5OaIZV2HiQiixBhYNUtrOjKrAgPfgqHTwKb603VKovPo9chLYGDgQ2HRBgYGhISEPCSKHoX0eSklIs7AF0OEo5yhEfxvLri3hHmviNwKpuYwaaFwnCvE8PG5c1FMnvyb5gl36NC2fPbZixXqBhEWFs327WfZtu0sJ0/e0OSPUVO/vmOekPGgQwcPWWOpFEhLy+TixducOxdFaGgUZ89GEhoaRVZWTr7tTEyMaNbMmTZt6tGmTb1KeT5yc3P577/LrFixn+Dg8xp/FheXaowf341hw9piYfHkhJUFHFzUFtq8AE7t0i5v11+YpLOzhGg5vl0sNzGF/hOE6egJddYkF
Qe9FS/qUZbw8HCN2ehJIy8PIsVLKZKaDAsnaId5n3kOxs0XIdbqi06XIfCOP1jaPvFwGRnZfPPNvyxZshtFUXBysuXzz1+kX79meusLUxCJiWkcORKe50cRxvnzt3jwr+fm5kDz5nVo1syZ5s2dadrUGRubiiPmSpv4+BTOnYvSCJXQ0EjCwqIfEo0ANWpY0aaNK61bC7HSrJlzpXW0TkpKY/36Y6xcuT+fb0+3bp6MGdOZHj0aFS8bLkBKEuxYIbLi3goXywwNoeOLwgRkXlU44h7aItYZGUOf/8GIj6FGnafsmUTf0Dnx4u/vj5+f30PiJSIigsDAQKZNm6ZZNn369HwOuvPmzePYsWMF+rw8iBQvpYyiwD8/wZLJoiaSQx346Hc4fxB+mQE52eBYDz5aJ6KSCsGRIxG8885arl+PA6Br14Z88cVLuLtX3FL1CQkpHD4czoEDYRw8GMbFi7cfuV29etVp1sxZI2jq13ekZk2bCifuCkt2dg43bsQTFnaX8PAYwsLuEhYWTXh4tKZuzoNUq2ZJ06a1adq0Nk2a1KZ163o4O9tV2u9QzcWLt1m16gCBgcc1+VksLc0YNqwdo0d3frr/X1SYECw7VoiHHhAPNP3GiQihW3nr1aLF0BB6jIJRn4jK9pJKic6IF7U4CQgI4MSJE0ybNi1ftlx/f398fX0JDw/X7KNSqfD399fMx8XFPTba6EGkeCkjwk8LM1LUFfG09PrX0KQzzB0uEkcZGcPoL2DIB4VKz52amsmPPwazZMkuMjNzMDExYuLE7kyZ4l38oWo9Ii7uHmfPRnLmzM2818h8yfHup0oVU1xdq+Pm5oCrqwOurtVxdXXAzc2B6tUt9fqmnJubS0xMMpGRCURFJRAVpSIqKoHIyASuXo3h2rXYh0w+9+PiUk0jUoRgccbR0Vqvv5OSJCUlgz//PMm6dYcJCbmuWd6ggSNjxjzL4MGtsbQsZr4URREmoc0LReSQ+tZSxxNemAydX4L9m2DLj3D9vHa/rsNEKv+6nsXvmKRCoDPipTyQ4qUMSU2GBeNhz+9ivv0A4ffyywyRARNEaYFpq0V2zEIQERHDJ59sZtcukVWzVi1b5swZxHPPtah0N6D4+BRCQ4WQOXv2JqGhUdy4EZ8vIdqDWFqa4eRkS40aVjg4WFOjhhU1alhr5h0drXFwsMLKyrzMcpFkZ+egUqUSH59CXNw94uNT7pvuEReXwp07iURFJXDrluqx4gREmLK7ew3c3Wvg4XH/q0OlELpFRVEUTp++ydq1h9m8+YRmlMXY2JDevZsyenRnOnV6ilDnjDSRwv+PH7R5WADa9hMhzU7u8NcS2P6LtspzFUvoPVo449Zp+HQdlFQYpHiR4qXsUBRRFG3pFGFGqlFXRB5dOydCqjPSwLaGKC3QpnchD6mwfXsos2f/oRl9ePbZBnzxxUvUr//k+koVmaysHG7ciOPq1RiuXo0lIiJG8z4yMuEhP5rHYWpqRNWqZlhYmGFpaZb33pSqVcV7Y2NDwEDjf21gIN7f/5qZmU1aWiZpaVmPfE1NzdTUiCosRkaG1KxpQ+3attSubYezsz21a9tSt241PDwcqVXLRq+yNJcXKlUqmzaFsHbt4XxFIV1dqzN8eHuGDm37dFFDMZFClGz101ZxNq+qFSXRN4Rp6OhW7ShM7fpiXe/RUFVenyX5keJFipeyJ+ykSGp3KyzPjOQrqr1+9bJIeAdFKi0AIlLkxx93smTJLjIysjExMWLs2C68/bY3trYyq+aDZGRkc+NGHNHRSdy9m0RMTHLeaxLR0clER4vXhIRH+4aUNnZ2FtjZVaVaNUvs7atib699X6OGNc7OdtSubYejo7VeVCLXRbKzc9i37zKBgcf599+zmrT9ZmbGPPdcC0aMaE+HDu7FH2VRFLhwWJiG9gVqQ50d68Ggt4QJ6NCf8OePIsGlmjZ9hemoTR9Z5VlSIFK8SPFSPqQkwYJxsHe9mO8wECYvhd++gL+XimWez8CMdYUqLaDm2rVYPvlkM8HBwk5ubW3OhAndGTu2S4WtG1OaZGXlkJKS8cCUqXmfmprBvXsZ5OTkah6Y1ZcJRVFQFO2rmZkx5uYmVKliSpUqD76aYm5ugq1tFWxtLaQgKUXOnYsiMPA4mzeH5Mv47OnpxIgR7Rk8uPXTFYXMyhRiZfMCuHRMu7x5V3hhCrg0Fv/x7SsgNUmss7CCXqOFqHFuUPzPllQapHiR4qX8UBRRA2nZO+KC5+gCM9dDzE34fizcUxWptMD9BAefY+7crZrcMHZ2VXnzzR6MHt25UmQ7lUju586dRDZtCmHjxuP5MgLb2VVl0KBWDBnShpYt6z6dr5gqRpiF/loC8XmfYWIK3UeIkRRVtPB1Ofav1jTk3AAGvQ3er0rTkKRISPEixUv5c+UEfDlU5HYwNoGx86DjC/D1SBFWDUUqLaAmNzeXv/46zTffbCM8PBoABwcrJk/2ZtSojpiZVdyCeBJJUlIa27eHsnHjcfbvv6LJYWNqakTv3k0ZPLgN3bt7Pr0zdvhp+GMh7ForfNlAON0//wb0GCHEyp+LtNWdQSScG/Q2tO4tTUOSYiHFixQvukFKInw3Vgw3gxAvU3+CTQvg96+KXFrgfrKzc9i0KYRvv92uceqtVcuWqVN7M3RoO0xMpIlCUjFITk4nKOgcW7acYs+eC2RmaqOx2rVzxcenDc8/3/Lp/cBycuDwX0K0qNPzAzRoI+oKubeEf/zz526xsBJJ5Qa+KZxxJZKnQIoXKV50B0URQ85+7+aZkerBx+uFXdx3FMTfEaUFJi6A58YXqrTA/WRmZhMQcJQFC3Zw+7YIw3RxqcbEid0ZMqSNDJ2V6CX37qUTFHSev/46xe7dF8jIyNasq1/fkUGDWjF4cGtcXEqg3s+NC7DzN9i1Bu7m5X4xNIJnBwvTUEqSGGU59q92H+eG8EKeaUhWd5aUEFK8SPGie1wOEWak2xHCjDTuG+j2MnwzWntRfNZHlBuoWa/Ih09Pz+LXXw+yaFEwsbH3ALC1tWDkyPaMHt2Z2rVlJVqJbnPvXjo7d17gr79OsWvXBU2kEIC7ew0GDmzJwIEtadiwBGr9xESKMh+710L4Ke1yK3thzvV+BU4Ei4RyUVfEOgODPNPQZPDylqYhSYkjxYsUL7pJSiJ8+zrs3yjmO78EU5eL5FU/fyhKCxgaQffhMHQ6uDYt8kekpmawdu1hfv55n6bcgJGRIc8914Lx47vi5eVSkj2SSJ6KW7dUBAWdY/v2UA4evJLPJOTm5sDAgS0ZMKAlnp5OT5+kMSle/Pd2/QZn/9M62BoZi4Ry3UeILLfbfoYdKyFNPARgYX2facjj6dogkTwGKV6keNFdFAW2LBZmpOwskdRu5Czh+7J6NpwI0m7bfgAM+xCadCzyx+Tk5BIUdI7ly//j4MEwzfLWrV0YO7Yr/fs3l34xkjJHURTOn7/F9u2h7NgRypkzkfnWu7k58PzzLRgwoCWNG9d6esGSnir8WHavFSOc2drRHJo+Cz1GQvMucHKnGIlRO9ODNq2/9ysi
I65EUspI8SLFi+5z+bhIanfnqph3qANDpwmnwD9+EE6+6p9m02fh5Q/F02ExLuahoVEsX/4ff/wRonmydXKyZfToTgwe3IZatWxLpk8SySNIT8/iyJEIgoLOsWNHKJGRCZp1BgYGtG7tQp8+TenTpxkeHiVQjDQnW4iRXb/Bgc3aERQAt+ZCsLTtD1eOC8Fycqc22ZyBATzzvIga8vIu1v9NIikuUrxI8aIfpKWI6IUN87U5JOwcYfB70KqnSHoVtEr7tOjWXIzEdBkihrqLSExMMqtXH2DVqgMavxgDAwM6d66Pj08b+vdvTtWq0sFX8nQoikJYWDR7915kz55LHDwYls9/xdzchC5dGtKnT1N69WpM9eol4PCqKHDxiBAse9eL/CtqHF2ESajzS3DnGuxZB0e2akOgARq2hW7DoetQqF776dsjkRQDKV6keNEvMtOFjT3ga220g5W9KOrWebBYt3WZ9gmypquoVt17NJhVKfLHZWRk8+efJ/j996McPqytaF6liinPPdecwYPb0LlzfYyMpEOipHAkJqaxb98l9u69xJ49l4iKSsi33tHRmu7dG9GnT1OefbZBySVVvHFB5GLZvVY4w6uxqQ5dhgoxkp4qBMvBP7QhzgB1GwnB0n249GWR6ARSvEjxop9kZ4knx9/napNfWVjBgDeFUPlvg8hBkRgr1tk5ivwTAyZBVZtifeSNG3Fs3BhCYOAxrl6N1SyvWdOGl15qjY9PGzw9SyC6Q1KhSEvL5MSJ6xw6FM7evZc4efK6JmEciKRxzzzjTrdunnTt2pBGjUrA4VZNTKSo5L7rt/yRQuZVRS6l7sPBzEL8X/Zt0P5fQPiYdR8uRItbc2kWkugUUrxI8aLf5OQIn5d1X2qLOppVESGcA9+E49uFqSn6hlhnYS0EzIvviCygxUBRFE6cuE5g4HH+/PMkKlWqZl2jRk707t0Ub+/GtGpVV1Y0roSkpmZw7Ng1Dh8O59ChME6dupEvMgjAw6MG3bp50q2bJ+3bu5dsyQp1pNDutXBmb/5IoTZ9RdbbGnXh4J9C2MTc1O5r4yBGYLoPh0YdZIizRGeR4kWKl4pBbi4c+RvWfqEtBmdiCr3HwOB3hY0/wBeun8tbZwZ9xoDP+1DLvdgfm5GRzc6d5wkMPM7OnefJytLepKpXt6RHj8b07t2ELl0aYGkpC0NWRJKT0zl27CqHDoVx+HA4p0/fJDs7N982jo7WtG/vTufO9ena1RNn5xLOJZSRJiKFdq2FY/88IlJohHBwD9khHG/vr+JsYQ2dXhSCpVXPYvmISSRljRQvUrxULBRFJMxa+4XITwEiH0yPkSIKKSoMAubC+UN56wyFvX/YdHFxfwri41PYtes8QUHn2bPnIsnJ6Zp1pqZGdOjgQa9eTejVqwl16tg/1WdJyofs7BwuXrzDqVPXOXHiBidPXufy5bs8eGmsXduODh3cad/enQ4d3KlXr3rJmYLUpCTCqd0iSujApocjhbqPgGZdREjz7nVwJUS73sRMpBfoPlwkkzOVwlqiX0jxIsVLxeXsPmFOOr5dzBsYiMy8wz8SacwDvs6fxrxNXyFwmnV5avt+ZmY2R49eJSgolKCg81y7Fptvff36jjzzjBvPPONGu3ZuODvblfzNTfJUKIrCrVsqTp7UCpUzZyJJS8t8aFsXl2oaodKhg0fpiNPsLDGCeCJITBePasOWQRsp1KavcM7dsy5/gjlDI/DqJQRLxxdkFWeJXiPFixQvFZ9Lx2DdVyKCQs0zz8OImWBaBdb7wt4AYXoCaNxBhFk/83yJ2PzV4bBBQecIDj7PsWNXycnJb1ZwcrKhXTu3vMkVT08nGcFUhmRl5RAeHs3587c4f/4WFy7c4ty5KKKjkx/a1tLSjJYt69KqlQutWtXFy8uFGjVK4fqhKHDzklasnNmTPwIIRIHDNn3Eb1UVA3t/F2I9R1vfiKadhdNtlyFg61Dy7ZRIygEpXqR4qTxcPSuik+4XKq16woiPReK7wG9g+wptTguXxqL0QPfhosZSCZGQkMLhwxEcPRrBsWNXOXPmYR8Ja2tz2rRxpW1bV5o2rY2npxO1atnK0ZkSICYmWSNQhEi5xZUrd/P5K6kxMjLE09MJLy8hVry8XPDwqFF6jtiqGDgZLMRKSBDE5s+qi3U18Zv16gX1msGtMOHrcniL8HtR495SjMJ0GyaccyWSCoYUL1K8VD4iLwuTUfCv2ifUJp1g+Ext1t6/lohq1iAu/j7vQ9/XwdyixJuTmprByZM3OHr0KkeOhBMScp2UlIyHtrO2NsfT04lGjWrh6emU994Ja+ui56+p6CQlpXH1agwREbFcvRqT9z6Gq1dj80WH3Y+lpRmNGtWiceNaNGokvucmTWqXbCTQg2SkQeh+7ejK/eHMIJzOm3QWYqVxBzHycmqX2PZaaP5ta9fXhjbX9Sy9NkskOoAUL1K8VF7uXof180RxOfVoi4eXMCe16A5b/WDT99oMpDbVRZXcHiPAya3U8l5kZ+dw/vwtjh69SkjINS5evE1YWPRDpiY1tWvb0aiRE+7uNahd2w5nZzucne1xdrbDxqZKhRytuXcvndu3E7l9W6V5vXYtjoiIaK5ejSUu7l6B+xoYGODqWj1PpAih0rhxLZyd7Uo/tD03FyJOa0dWQvflz14L4NZCiJWW3YVj7bkDwgn9wqH85iADA3BvBa17wbNDoL6XzMUiqTRI8SLFiyTuNmz8Fv5eBukpYplLY3j5I+g4CHauESJHXVsJxGhMi+7Qsod4rVGnVJuYkZFNWNhdLl68zYULtzWvt2+rHrufpaWZRsiohU3t2nbY21fF1tYCOzvxamlpVq4iR1EUUlIyUKnSSExMRaVKJTExjdjYZO7cSeTWrUTu3BFC5c6dxHyRXAXh4GCFq2t1XF0dcHNz0Lx3da2OhUUZlnaIvqkdWTm5ExJj8q+vXluIFa9eUK2WMG+eDIbTe7Sjf2oc64ntWvcSvzub6mXVC4lEp5DiRYoXiZrEWNi8UJiN1DeNWu7Cebf7CDj0J/y19OEnYIBaHloh07K7yOhbBqhUqVy6JITM9etxREbGExmZQGRkwmNHHx7EyMgQG5sq2Npa5JusrMwxNjbE2NgIU1NjjI0NMTExypsXr+r53FyFrKxsMjOzyczMITMzm6ysnLx58T4jI5uMjGySkoRISUgQIiUxMfUhv58nYWVlTs2aNjg52VCzpi0uLtVwda2eJ1QcsLIqp/DflCThXKseXYm8lH99FUto3k2IENdmwq/l5E4hWGKj8m9rZS98XFp5i+KHTm5l1QuJRKeR4kWKF8mDpCTClsWw8TtIihPLqlhC867iJtKoA6So4PRu4X9wJUTrAKzGpXGemOkh9rMu+7wuqamZREUlEBWVkE/U3LqlQqVK1Uz3FwIsb0xMjLC1tcDGpgo2NhbY21elVi3b+0SKDU5Otjg52ehO0r+cbBG2rB5duXA4fwizoSE0bCfESqMOoj7X2b3CFKROmqjGxEwklfPyFr8195ZgZFSm3ZFI9AEpXqR4kRREWkqe38t
3Dz8R2zlCy57iJtOgrTApqcVMxOn82xoYiJuQ2szU9FmdyrGRlpZJYmJaPkGjnu7dyyA7O4esLO0k5nPzXrM17w0NDTE1FSMxpqbGee+N73uvXm6sESe2tupX8b5KFVPd9tFJiIZrZ4Wz7NW812uhWnOjmloeWr8VMwu4fFyMrFw4/LDfioeXVqw06VSsAqISSWVDihcpXiRPIjcXrp4RT8ong0Xir/vDUgGcG2jFjGtzIWBO7RKC5saF/NsaGkGDNlox06RTqUQxSZ6CtHtw7VyeODmrFSpq5+0HUZt3vHoJH5aoMPFbeVRuFie3PDNQnrixrlbq3ZFIKhpSvEjxIikqmRlw8bBWzFw6mt9sZGgI9VuLG1SrnlDTTWyvFjO3wvMfz9gEPNsLIdOyu3hvWoYOpZWZ7CwROq8ZRckTKvc7Z9+PgYEQHy5Nhb+KazMhXGIj80KYgyH+dv591LlZ1L8H6bcikTw1UrxI8SJ5WlISRWSI2unywZEWU/O8XB15pgHraqLa76ldYnowEZmpuRiNUTsAN2hToknyKiWKIiqLXz2bX6jcvJi/iOH92DmKRHD18oRKLXfAQAgbtbno+rn8VZlBnL8H/VZkdWaJpETRKfEyb948AMLDxZOpn59fofaxtbUFQKVSMW3atEJ/nhQvklIhNkorZB71JG5lL0SJV96TOIgCe2qfmQdNE1UsRb0ltZnJrYV04nwcibEP+KTkvT5ovlFTxVIIFLVQqeMpksMl3NWKlGuhcDtcWyfofgwMtCNtXr2gSUdZ6FAiKWV0RrxMnz4dX19fzfyECROIiIggKCiowH3UYkctWIKDg9mwYUOhRA9I8SIpAxRFPN2rTUyndz98E3V00ZoUWvYQEU7qUZkzeyA5If/2puZgWwOsq4taNTb3TbaPeF/VpuIkL1MU4W+UohIjXskJIhT5fqESf+fR+xqbCGGiFioujcG8qjgf189pRUrkpYdD4dVYVxOjMPWa5pmOmor3VW1KrcsSieRhdEK8qFQqhgwZwoYNGzSjKCdOnKB169aEh4fj5vZoG7GdnR1Xr17V7AMie2ZhmynFi6TMyckWhSLVYubCoYfNFm7N88RMXvTJ7fC8kZldwlm4oBGEgjA2ebTQKUjsWNmXnpkjM0MrPO6f7uUtS31gXjPdN1+QsLifmq5akVGvKVS1hexM4d+iNvfcOP+w47UaCyutQFEfo14TIRorihCUSPQYnREvrq6u7Ny5Ey8vL80yOzs7QkJCNMvuJyIiAnd394eEioGBAUFBQXh7ez/xc6V4kZQ7aSkiRbzaxPRgmLWRscgN4uUtTBPW1USeEANDkUgvMUZMqhjt+/vnHwzhLQyGhmBVreBRHbUQMjB8WFg8KDxSH5h/MBV+cTE0BAtrMeLh5K4VKnY1QckVpR/uN/k8mKlWjak51G2cX6DUayoKdUqRIpHoLEW5fxuXViNsbW1JSMg/NB4cHAxQ4KhLREREgcdSqVSPXJeRkUFGhvbimZRUwAVNIikrqlSFtn3FBCKPyKldcGqnSHh297oQN6H7Ht7Xwlo4ldrW0L42aCte1cssrMWNPjvr8SJHPd1Ticgp9XxpYWEFFjZgaSsEiGZ6YP7+9eaWeSNCBiIJXIoKbl4S4iTiDOxaW3CbjYzBuaEQJ+rRFNemIhJM+g9JJBWaUhMvj2Lu3Ln4+fnlMwkVBnt7e+Lj4ws85qeffloCrZNISgm7GtD9ZTEpCtyOECMyp3bCrTDhzKuKFmIkNUlMUVeefFwTU7CpkV/o2DkKHxBb9XwNYTLCAO4lFCx2kmLF+9zcB8SH7aOFSJU80WFgKEYzcnPFiFBKorYPqUnaedVdkWI/NTHvNW8qzCjS/aHM9e6bnBuI70AikVQ6Ci1eAgMDCQgIeOJ2M2bMeKRJaPr06QwbNozx48cXrYVQoHBRf967776rmU9KSqJOndItqCeRFBsDAxGeW8sdnp+gXa4oYoQk4W6emLmrFTXqZfevS02GrEwRkv1gWHZBWFfLL3RsHaFabZENtooVpCU/LDqS4oTYelCQFMd09TjMqogRJQtrYTK6X6TUbSQT/kkkknyUSZ6XwMBA4uPjnyhcpM+LRFJIMtK04qYggaOeT4p9uE5TSWFqrhUdan8VC2tRKkH9/sF16vUWNuK1ipUcQZFIJLrhsKsmODgYlUqFj48PIJx24+PjHxttFBISkm+9jDaSSJ6CnBxIjn+0sFGLn7TkR4uMJwkSKTokEkkJoRMOuyBCo0+cOIGPj4/GGTcwMFAzAhMREUFgYGC+JHQzZswgODhYs83920skkmJgZCQiiWwdyrslEolEUiKUeqj0o6KE1B/p7++Pr6+vJvuumnnz5mlGXo4dO5Yv0d2TkCMvEolEIpHoHzplNiprpHiRSCQSiUT/KMr9W1YWk0gkEolEoldI8SKRSCQSiUSvkOJFIpFIJBKJXiHFi0QikUgkEr1CiheJRCKRSCR6hRQvEolEIpFI9AopXiQSiUQikegVUrxIJBKJRCLRK6R4kUgkEolEoldI8SKRSCQSiUSvkOJFIpFIJBKJXiHFi0QikUgkEr3CuLwbUNKo60wmJSWVc0skEolEIpEUFvV9uzD1oiuceElOTgagTp065dwSiUQikUgkRSU5ORkbG5vHbmOgFEbi6BG5ubncunULKysrDAwMSuSYSUlJ1KlTh5s3bz6xTLe+Ivuo/1T0/oHsY0WhovexovcPSqePiqKQnJxMrVq1MDR8vFdLhRt5MTQ0xNnZuVSObW1tXWF/iGpkH/Wfit4/kH2sKFT0Plb0/kHJ9/FJIy5qpMOuRCKRSCQSvUKKF4lEIpFIJHqFFC+FwMzMjNmzZ2NmZlbeTSk1ZB/1n4reP5B9rChU9D5W9P5B+fexwjnsSiQSiUQiqdjIkReJRCKRSCR6hRQvEolEIpFI9AopXiQSiUQikegVFS7PS0kzb948AMLDwwHw8/Mr1D62trYAqFQqpk2bVmrte1pUKhXr169nw4YNBAUFPXH74OBg/Pz86NWrF25ubgQFBdG2bVt8fHzKoLXFo6h9BP06h1D09uryeSzOd69P56sinatHURn+b5Xhugk6fv9TJAUybdq0fPPjx49XvL29H7uPr6+v4uvrq5kPCgpSxo8fXyrte1pCQkIUPz8/xdfXV/Hy8irUPhs2bFBsbW0VQHFzc1P8/PxKuZVPR3H6qE/nUFGK115dPY/F6Ys+na+KdK4eRWX4v1WG66ai6P79T4qXAkhISFC8vb2VhIQEzbKQkBAFUMLDwwvcz9bWNt8+iqIouq4RN2zYUKQ/4YP90weK0kd9O4fFaa+unsfi9EWfzldFOlePoyL/39RU5OumPtz/pM/LYzh+/DgRERGaeTc3N0AMhT2KiIgIVCqVZsjsfoKDg0ujiZISRt/Oob6193EUpy/61H99amtZIb8T3UXX73/S56UAbG1tSUhIyLdMfQLUJ/FB7j/RDx6roBOuj6xfvx57e3vi4+MJDw/H19e3vJtUYujbOXya9uraeSxOX/TpfFWkc1VS6NP5e1r06Rzqw/
1PipciMHfuXPz8/B6pLB+H+gdbEfDy8gK0P2B/f3+GDBnChg0byrNZpY6+ncMntVefzmNxvnt9Ol8V6VyVFPp0/gpDRTiHunb/qzTiJTAwkICAgCduN2PGDM0P7X6mT5/OsGHDGD9+fJE/uyz+hE/bv8LyoOoeOnQoEyZMKHC4sCQpqz4+irK6kJZUH5/U3vI8j0WlON+9Pt34KtK5Kin06fwVBn0/h7p4/6s04sXHx6fYYWmBgYG4u7s/8cQVNJymUqkKXFdSPE3/ikJgYGC+z1H/8SIiIkpcMDxIWfSxPM8hFL2PxW1veZ7HgihOX8r7fBWFinSuSgp9On9Pgz6fQ529/5WKG3AFIigoSNmwYYNmPiEh4Yne1g+u1/WvubBe8wkJCQ95m6uX6bonfVGjH/TpHBa1vbp8Hovz3evT+apI5+pxVOT/m5rKcN3U5fufjDZ6DCdOnODEiRN4eXkRERFBREQE/v7+2NvbA0I1q5P4qJkxY0Y+z+rAwMBiDbWVJQUN6z3YP1tbW6ZNm5ZPRfv7++Pj46PzQ5+F7SPo3zl8Unv16TwWtS+F2UeXqEjn6nFU5P+bmop+3dT1+5+sKl0AKpUKV1fXR3pJq78yf39/fH19NdkH1cybN0/zQz127JjOepVHRERofCxOnDjBtGnT8mV9fFT/VCoV/v7+mvm4uDid7R8Ur4+gP+dQzePaq2/nsah9edI+ukZFOlcPUhn+b5XhuqkP9z8pXiQSiUQikegV0mwkkUgkEolEr5DiRSKRSCQSiV4hxYtEIpFIJBK9QooXiUQikUgkeoUULxKJRCKRSPQKKV4kEolEIpHoFVK8SCQSiUQi0SukeJFIJBKJRKJXSPEikUgkEolEr5DiRSKRSCQSiV4hxYtEIpFIJBK9QooXiUQikUgkesX/AVlnn2iRFEdBAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "t, x_stacked = prepare_VdP_data()\n", - "t_max = 320\n", - "x, y = x_stacked[:, 0], x_stacked[:, 1]\n", - "t_train, t_test = t[:t_max], t[:t_max]\n", - "x_train, x_test = x[:t_max], x[t_max:]\n", - "y_train, y_test = y[:t_max], y[t_max:]\n", - "plt.plot(x_train, y_train, color = 'midnightblue')\n", - "plt.plot(x_test[::3], y_test[::3], color = 'orangered')\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": 129, - "id": "ea55038c", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "setting builder with \n", - "setting builder with \n", - "Deriv orders after definition [[None], [0], [0, 0]]\n", - "Surface training t=0, loss=0.9886471033096313\n", - "Surface training t=1, loss=0.9427707195281982\n", - "Surface training t=2, loss=0.9672328531742096\n", - "Surface training t=3, loss=0.9703816771507263\n", - "Surface training t=4, loss=0.9534877836704254\n", - "Surface training t=5, loss=0.8831912577152252\n", - "Surface training t=6, loss=0.9205136001110077\n", - "Surface training t=7, loss=0.9216731786727905\n", - "Surface training t=8, loss=0.8929721117019653\n", - "Surface training t=9, loss=0.9314711689949036\n", - "Surface training t=10, loss=0.9022430181503296\n", - "Surface training t=11, loss=0.9095550775527954\n", - "Surface training t=12, loss=0.9485109448432922\n", - "Surface training t=13, loss=0.8985200524330139\n", - "Surface training t=14, loss=0.8988443613052368\n", - "Surface training t=15, loss=0.9079181849956512\n", - "Surface training t=16, loss=0.856785386800766\n", - "Surface training t=17, loss=0.8943530321121216\n", - "Surface training t=18, loss=0.9146427512168884\n", - "Surface training t=19, loss=0.8674331605434418\n", - "Surface training t=20, loss=0.9292161464691162\n", - "Surface training t=21, loss=0.8778998255729675\n", - "Surface training t=22, loss=0.8907906413078308\n", - "Surface training t=23, loss=0.8701808154582977\n", - "Surface training t=24, loss=0.8806784152984619\n", - "Surface training t=25, loss=0.8846317827701569\n", - "Surface training t=26, loss=0.8942444920539856\n", - "Surface training t=27, loss=0.8487709760665894\n", - "Surface training t=28, loss=0.8552230298519135\n", - "Surface training t=29, loss=0.8501128554344177\n", - "Surface training t=30, loss=0.886328250169754\n", - "Surface training t=31, loss=0.89305579662323\n", - "Surface training t=32, loss=0.8885660767555237\n", - "Surface training t=33, loss=0.8705417215824127\n", - "Surface training t=34, loss=0.9101085662841797\n", - "Surface training t=35, loss=0.883236438035965\n", - "Surface training t=36, loss=0.8755265474319458\n", - "Surface training t=37, loss=0.8897087275981903\n", - "Surface training t=38, loss=0.8987145125865936\n", - "Surface training t=39, loss=0.8515855967998505\n", - "Surface training t=40, loss=0.8273382484912872\n", - "Surface training t=41, loss=0.8501545190811157\n", - "Surface training t=42, loss=0.8157375752925873\n", - "Surface training t=43, loss=0.8891990780830383\n", - "Surface training t=44, loss=0.863417387008667\n", - "Surface training t=45, loss=0.8449902534484863\n", - "Surface training t=46, loss=0.8651705682277679\n", - "Surface training t=47, loss=0.8387624621391296\n", - "Surface training t=48, loss=0.8899108171463013\n", - "Surface training t=49, loss=0.8949314057826996\n", - "Surface training t=50, loss=0.8846881985664368\n", - "Surface training t=51, loss=0.8558028638362885\n", - 
"Surface training t=52, loss=0.8631694614887238\n", - "Surface training t=53, loss=0.83486208319664\n", - "Surface training t=54, loss=0.843884140253067\n", - "Surface training t=55, loss=0.8584978580474854\n", - "Surface training t=56, loss=0.8652488887310028\n", - "Surface training t=57, loss=0.8313710391521454\n", - "Surface training t=58, loss=0.7993413507938385\n", - "Surface training t=59, loss=0.8742042779922485\n", - "Surface training t=60, loss=0.870885968208313\n", - "Surface training t=61, loss=0.8537605702877045\n", - "Surface training t=62, loss=0.8842488527297974\n", - "Surface training t=63, loss=0.8636505901813507\n", - "Surface training t=64, loss=0.819481611251831\n", - "Surface training t=65, loss=0.8114693462848663\n", - "Surface training t=66, loss=0.8084243535995483\n", - "Surface training t=67, loss=0.8779691755771637\n", - "Surface training t=68, loss=0.8470955193042755\n", - "Surface training t=69, loss=0.8639155626296997\n", - "Surface training t=70, loss=0.8799643218517303\n", - "Surface training t=71, loss=0.8338180184364319\n", - "Surface training t=72, loss=0.8804574012756348\n", - "Surface training t=73, loss=0.8388114273548126\n", - "Surface training t=74, loss=0.893090695142746\n", - "Surface training t=75, loss=0.8820487856864929\n", - "Surface training t=76, loss=0.8558200895786285\n", - "Surface training t=77, loss=0.8313979208469391\n", - "Surface training t=78, loss=0.8422020971775055\n", - "Surface training t=79, loss=0.8160859942436218\n", - "Surface training t=80, loss=0.8119702637195587\n", - "Surface training t=81, loss=0.850176602602005\n", - "Surface training t=82, loss=0.8378373980522156\n", - "Surface training t=83, loss=0.8290828764438629\n", - "Surface training t=84, loss=0.8308189213275909\n", - "Surface training t=85, loss=0.8508228659629822\n", - "Surface training t=86, loss=0.8458747267723083\n", - "Surface training t=87, loss=0.8200902938842773\n", - "Surface training t=88, loss=0.8573931753635406\n", - "Surface training t=89, loss=0.8189922571182251\n", - "Surface training t=90, loss=0.8291053771972656\n", - "Surface training t=91, loss=0.8653814196586609\n", - "Surface training t=92, loss=0.8588663339614868\n", - "Surface training t=93, loss=0.8343665599822998\n", - "Surface training t=94, loss=0.8733040988445282\n", - "Surface training t=95, loss=0.8483160138130188\n", - "Surface training t=96, loss=0.8673844933509827\n", - "Surface training t=97, loss=0.8467027246952057\n", - "Surface training t=98, loss=0.8449001312255859\n", - "Surface training t=99, loss=0.8182161748409271\n", - "Surface training t=100, loss=0.8737571835517883\n", - "Surface training t=101, loss=0.832281082868576\n", - "Surface training t=102, loss=0.8507178723812103\n", - "Surface training t=103, loss=0.8678694665431976\n", - "Surface training t=104, loss=0.8542511463165283\n", - "Surface training t=105, loss=0.819840282201767\n", - "Surface training t=106, loss=0.855309009552002\n", - "Surface training t=107, loss=0.8498559892177582\n", - "Surface training t=108, loss=0.8562309145927429\n", - "Surface training t=109, loss=0.8126132488250732\n", - "Surface training t=110, loss=0.8217630684375763\n", - "Surface training t=111, loss=0.8392975628376007\n", - "Surface training t=112, loss=0.7749800086021423\n", - "Surface training t=113, loss=0.8707666993141174\n", - "Surface training t=114, loss=0.8442619144916534\n", - "Surface training t=115, loss=0.8434422612190247\n", - "Surface training t=116, loss=0.7835531532764435\n", - "Surface training t=117, 
loss=0.8579587936401367\n", - "Surface training t=118, loss=0.8538073897361755\n", - "Surface training t=119, loss=0.8450124859809875\n", - "Surface training t=120, loss=0.8199772834777832\n", - "Surface training t=121, loss=0.8885166049003601\n", - "Surface training t=122, loss=0.8371411561965942\n", - "Surface training t=123, loss=0.843525230884552\n", - "Surface training t=124, loss=0.8029419481754303\n", - "Surface training t=125, loss=0.837071567773819\n", - "Surface training t=126, loss=0.8534260392189026\n", - "Surface training t=127, loss=0.832086980342865\n", - "Surface training t=128, loss=0.8671885132789612\n", - "Surface training t=129, loss=0.86307093501091\n", - "Surface training t=130, loss=0.8132600784301758\n", - "Surface training t=131, loss=0.7904165685176849\n", - "Surface training t=132, loss=0.834281861782074\n", - "Surface training t=133, loss=0.8295194208621979\n", - "Surface training t=134, loss=0.8491339385509491\n", - "Surface training t=135, loss=0.817213237285614\n", - "Surface training t=136, loss=0.7950406074523926\n", - "Surface training t=137, loss=0.822572261095047\n", - "Surface training t=138, loss=0.8129115700721741\n", - "Surface training t=139, loss=0.8043290078639984\n", - "Surface training t=140, loss=0.7844990193843842\n", - "Surface training t=141, loss=0.8011768758296967\n", - "Surface training t=142, loss=0.8651831746101379\n", - "Surface training t=143, loss=0.8040529787540436\n", - "Surface training t=144, loss=0.8305290043354034\n", - "Surface training t=145, loss=0.8372505307197571\n", - "Surface training t=146, loss=0.8267313838005066\n", - "Surface training t=147, loss=0.8250131011009216\n", - "Surface training t=148, loss=0.8231572806835175\n", - "Surface training t=149, loss=0.8346372246742249\n", - "Surface training t=150, loss=0.7842526137828827\n", - "Surface training t=151, loss=0.7906504273414612\n", - "Surface training t=152, loss=0.8000355362892151\n", - "Surface training t=153, loss=0.8082972764968872\n", - "Surface training t=154, loss=0.8385337591171265\n", - "Surface training t=155, loss=0.823697417974472\n", - "Surface training t=156, loss=0.79609015583992\n", - "Surface training t=157, loss=0.8089002072811127\n", - "Surface training t=158, loss=0.77027228474617\n", - "Surface training t=159, loss=0.8267933130264282\n", - "Surface training t=160, loss=0.7704010605812073\n", - "Surface training t=161, loss=0.8284273743629456\n", - "Surface training t=162, loss=0.7760556936264038\n", - "Surface training t=163, loss=0.7785756587982178\n", - "Surface training t=164, loss=0.8614969551563263\n", - "Surface training t=165, loss=0.9172786772251129\n", - "Surface training t=166, loss=0.8526241779327393\n", - "Surface training t=167, loss=0.8204451203346252\n", - "Surface training t=168, loss=0.8336445689201355\n", - "Surface training t=169, loss=0.7550641894340515\n", - "Surface training t=170, loss=0.8167165815830231\n", - "Surface training t=171, loss=0.8214873373508453\n", - "Surface training t=172, loss=0.8498299419879913\n", - "Surface training t=173, loss=0.8043765425682068\n", - "Surface training t=174, loss=0.726538211107254\n", - "Surface training t=175, loss=0.7382505834102631\n", - "Surface training t=176, loss=0.8104452788829803\n", - "Surface training t=177, loss=0.7555638253688812\n", - "Surface training t=178, loss=0.830658495426178\n", - "Surface training t=179, loss=0.7829939723014832\n", - "Surface training t=180, loss=0.8563087284564972\n", - "Surface training t=181, loss=0.8139315247535706\n", - "Surface training 
t=182, loss=0.8359466195106506\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=183, loss=0.8311417400836945\n", - "Surface training t=184, loss=0.8251111507415771\n", - "Surface training t=185, loss=0.8179400861263275\n", - "Surface training t=186, loss=0.750582754611969\n", - "Surface training t=187, loss=0.822142630815506\n", - "Surface training t=188, loss=0.8134550452232361\n", - "Surface training t=189, loss=0.7724684178829193\n", - "Surface training t=190, loss=0.7809622287750244\n", - "Surface training t=191, loss=0.775101512670517\n", - "Surface training t=192, loss=0.8266279101371765\n", - "Surface training t=193, loss=0.825160562992096\n", - "Surface training t=194, loss=0.7346524298191071\n", - "Surface training t=195, loss=0.805948406457901\n", - "Surface training t=196, loss=0.8062301874160767\n", - "Surface training t=197, loss=0.8098877668380737\n", - "Surface training t=198, loss=0.7856581807136536\n", - "Surface training t=199, loss=0.8060660362243652\n", - "Surface training t=200, loss=0.831815093755722\n", - "Surface training t=201, loss=0.817896157503128\n", - "Surface training t=202, loss=0.8262318670749664\n", - "Surface training t=203, loss=0.8007483184337616\n", - "Surface training t=204, loss=0.783202588558197\n", - "Surface training t=205, loss=0.7860988676548004\n", - "Surface training t=206, loss=0.81536865234375\n", - "Surface training t=207, loss=0.837171882390976\n", - "Surface training t=208, loss=0.8292079567909241\n", - "Surface training t=209, loss=0.7927564382553101\n", - "Surface training t=210, loss=0.7929188013076782\n", - "Surface training t=211, loss=0.8041452169418335\n", - "Surface training t=212, loss=0.7681794762611389\n", - "Surface training t=213, loss=0.8326329588890076\n", - "Surface training t=214, loss=0.7487793862819672\n", - "Surface training t=215, loss=0.7815586626529694\n", - "Surface training t=216, loss=0.8123393654823303\n", - "Surface training t=217, loss=0.76442551612854\n", - "Surface training t=218, loss=0.784917414188385\n", - "Surface training t=219, loss=0.7649313807487488\n", - "Surface training t=220, loss=0.7694796919822693\n", - "Surface training t=221, loss=0.8200274705886841\n", - "Surface training t=222, loss=0.7867081165313721\n", - "Surface training t=223, loss=0.7618357539176941\n", - "Surface training t=224, loss=0.803013414144516\n", - "Surface training t=225, loss=0.8156739175319672\n", - "Surface training t=226, loss=0.7858262360095978\n", - "Surface training t=227, loss=0.7860020995140076\n", - "Surface training t=228, loss=0.7613597512245178\n", - "Surface training t=229, loss=0.8027818202972412\n", - "Surface training t=230, loss=0.7970919907093048\n", - "Surface training t=231, loss=0.7968169450759888\n", - "Surface training t=232, loss=0.8696111142635345\n", - "Surface training t=233, loss=0.8197253346443176\n", - "Surface training t=234, loss=0.767425924539566\n", - "Surface training t=235, loss=0.7932302355766296\n", - "Surface training t=236, loss=0.7588902115821838\n", - "Surface training t=237, loss=0.7590893805027008\n", - "Surface training t=238, loss=0.7907216250896454\n", - "Surface training t=239, loss=0.7456654608249664\n", - "Surface training t=240, loss=0.7929400205612183\n", - "Surface training t=241, loss=0.7888955771923065\n", - "Surface training t=242, loss=0.8303270637989044\n", - "Surface training t=243, loss=0.757095605134964\n", - "Surface training t=244, loss=0.754798948764801\n", - "Surface training t=245, loss=0.7680453360080719\n", - 
"Surface training t=246, loss=0.7991014719009399\n", - "Surface training t=247, loss=0.7833987474441528\n", - "Surface training t=248, loss=0.7984714210033417\n", - "Surface training t=249, loss=0.7506426870822906\n", - "Surface training t=250, loss=0.7900361716747284\n", - "Surface training t=251, loss=0.7672570645809174\n", - "Surface training t=252, loss=0.8189331293106079\n", - "Surface training t=253, loss=0.7619256377220154\n", - "Surface training t=254, loss=0.7758532762527466\n", - "Surface training t=255, loss=0.78867968916893\n", - "Surface training t=256, loss=0.7440716922283173\n", - "Surface training t=257, loss=0.8046784698963165\n", - "Surface training t=258, loss=0.7999950051307678\n", - "Surface training t=259, loss=0.7786743342876434\n", - "Surface training t=260, loss=0.7695385217666626\n", - "Surface training t=261, loss=0.7454710006713867\n", - "Surface training t=262, loss=0.7794701755046844\n", - "Surface training t=263, loss=0.7954500913619995\n", - "Surface training t=264, loss=0.7459602952003479\n", - "Surface training t=265, loss=0.7910334169864655\n", - "Surface training t=266, loss=0.7953811585903168\n", - "Surface training t=267, loss=0.7273563742637634\n", - "Surface training t=268, loss=0.7831596732139587\n", - "Surface training t=269, loss=0.7274343073368073\n", - "Surface training t=270, loss=0.751128077507019\n", - "Surface training t=271, loss=0.763201504945755\n", - "Surface training t=272, loss=0.7401313781738281\n", - "Surface training t=273, loss=0.7804107069969177\n", - "Surface training t=274, loss=0.7662611603736877\n", - "Surface training t=275, loss=0.7668147683143616\n", - "Surface training t=276, loss=0.7861534953117371\n", - "Surface training t=277, loss=0.7928951978683472\n", - "Surface training t=278, loss=0.7713221311569214\n", - "Surface training t=279, loss=0.7488473355770111\n", - "Surface training t=280, loss=0.7516737878322601\n", - "Surface training t=281, loss=0.8057864904403687\n", - "Surface training t=282, loss=0.746375173330307\n", - "Surface training t=283, loss=0.7459585070610046\n", - "Surface training t=284, loss=0.7397898733615875\n", - "Surface training t=285, loss=0.7832269072532654\n", - "Surface training t=286, loss=0.7710034251213074\n", - "Surface training t=287, loss=0.7459835112094879\n", - "Surface training t=288, loss=0.7609102725982666\n", - "Surface training t=289, loss=0.7478221952915192\n", - "Surface training t=290, loss=0.734485387802124\n", - "Surface training t=291, loss=0.7935411334037781\n", - "Surface training t=292, loss=0.7580453157424927\n", - "Surface training t=293, loss=0.7685894668102264\n", - "Surface training t=294, loss=0.8143926858901978\n", - "Surface training t=295, loss=0.7661538124084473\n", - "Surface training t=296, loss=0.7276478111743927\n", - "Surface training t=297, loss=0.735186368227005\n", - "Surface training t=298, loss=0.76703080534935\n", - "Surface training t=299, loss=0.7294259071350098\n", - "Surface training t=300, loss=0.7569030523300171\n", - "Surface training t=301, loss=0.7563686668872833\n", - "Surface training t=302, loss=0.795261561870575\n", - "Surface training t=303, loss=0.736057460308075\n", - "Surface training t=304, loss=0.7346121966838837\n", - "Surface training t=305, loss=0.7478389739990234\n", - "Surface training t=306, loss=0.7731813192367554\n", - "Surface training t=307, loss=0.7585662305355072\n", - "Surface training t=308, loss=0.7120082676410675\n", - "Surface training t=309, loss=0.7812053859233856\n", - "Surface training t=310, 
loss=0.7421368658542633\n", - "Surface training t=311, loss=0.7565788328647614\n", - "Surface training t=312, loss=0.752046674489975\n", - "Surface training t=313, loss=0.7535577416419983\n", - "Surface training t=314, loss=0.7659083306789398\n", - "Surface training t=315, loss=0.7225235402584076\n", - "Surface training t=316, loss=0.7078520059585571\n", - "Surface training t=317, loss=0.7557200193405151\n", - "Surface training t=318, loss=0.7756243944168091\n", - "Surface training t=319, loss=0.7446266412734985\n", - "Surface training t=320, loss=0.7558716833591461\n", - "Surface training t=321, loss=0.8115290403366089\n", - "Surface training t=322, loss=0.7421313524246216\n", - "Surface training t=323, loss=0.7563617825508118\n", - "Surface training t=324, loss=0.7665748000144958\n", - "Surface training t=325, loss=0.7081491053104401\n", - "Surface training t=326, loss=0.7442633807659149\n", - "Surface training t=327, loss=0.7398814260959625\n", - "Surface training t=328, loss=0.7718417346477509\n", - "Surface training t=329, loss=0.7302681803703308\n", - "Surface training t=330, loss=0.7807044386863708\n", - "Surface training t=331, loss=0.7558240294456482\n", - "Surface training t=332, loss=0.7502057254314423\n", - "Surface training t=333, loss=0.7383986115455627\n", - "Surface training t=334, loss=0.7327306568622589\n", - "Surface training t=335, loss=0.7323121428489685\n", - "Surface training t=336, loss=0.8141681849956512\n", - "Surface training t=337, loss=0.7443369925022125\n", - "Surface training t=338, loss=0.7796941697597504\n", - "Surface training t=339, loss=0.7635084688663483\n", - "Surface training t=340, loss=0.7697791457176208\n", - "Surface training t=341, loss=0.7231041491031647\n", - "Surface training t=342, loss=0.7566798031330109\n", - "Surface training t=343, loss=0.7781175076961517\n", - "Surface training t=344, loss=0.7420321106910706\n", - "Surface training t=345, loss=0.7474707663059235\n", - "Surface training t=346, loss=0.7616835534572601\n", - "Surface training t=347, loss=0.7406007945537567\n", - "Surface training t=348, loss=0.7859188914299011\n", - "Surface training t=349, loss=0.7662819921970367\n", - "Surface training t=350, loss=0.7279219031333923\n", - "Surface training t=351, loss=0.7471508979797363\n", - "Surface training t=352, loss=0.8032934367656708\n", - "Surface training t=353, loss=0.7229422032833099\n", - "Surface training t=354, loss=0.7381096482276917\n", - "Surface training t=355, loss=0.7781494557857513\n", - "Surface training t=356, loss=0.7953217923641205\n", - "Surface training t=357, loss=0.7036938369274139\n", - "Surface training t=358, loss=0.758266419172287\n", - "Surface training t=359, loss=0.7811751067638397\n", - "Surface training t=360, loss=0.7796443998813629\n", - "Surface training t=361, loss=0.7833282947540283\n", - "Surface training t=362, loss=0.7819331884384155\n", - "Surface training t=363, loss=0.7594474256038666\n", - "Surface training t=364, loss=0.7860160171985626\n", - "Surface training t=365, loss=0.8089755475521088\n", - "Surface training t=366, loss=0.7493117153644562\n", - "Surface training t=367, loss=0.7841696441173553\n", - "Surface training t=368, loss=0.7919690907001495\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=369, loss=0.7493490278720856\n", - "Surface training t=370, loss=0.728399395942688\n", - "Surface training t=371, loss=0.7533872723579407\n", - "Surface training t=372, loss=0.7769631445407867\n", - "Surface training t=373, 
loss=0.7208417057991028\n", - "Surface training t=374, loss=0.7503263652324677\n", - "Surface training t=375, loss=0.773250162601471\n", - "Surface training t=376, loss=0.6970821917057037\n", - "Surface training t=377, loss=0.7650207579135895\n", - "Surface training t=378, loss=0.7724440693855286\n", - "Surface training t=379, loss=0.7594838440418243\n", - "Surface training t=380, loss=0.7592717409133911\n", - "Surface training t=381, loss=0.7685916125774384\n", - "Surface training t=382, loss=0.7049773037433624\n", - "Surface training t=383, loss=0.721996009349823\n", - "Surface training t=384, loss=0.7028826177120209\n", - "Surface training t=385, loss=0.7391636669635773\n", - "Surface training t=386, loss=0.7510725557804108\n", - "Surface training t=387, loss=0.7712835669517517\n", - "Surface training t=388, loss=0.7432951629161835\n", - "Surface training t=389, loss=0.7504519820213318\n", - "Surface training t=390, loss=0.7527242004871368\n", - "Surface training t=391, loss=0.7665772438049316\n", - "Surface training t=392, loss=0.7643280029296875\n", - "Surface training t=393, loss=0.7131465375423431\n", - "Surface training t=394, loss=0.7564645409584045\n", - "Surface training t=395, loss=0.7130173742771149\n", - "Surface training t=396, loss=0.7397458851337433\n", - "Surface training t=397, loss=0.6724823713302612\n", - "Surface training t=398, loss=0.7076232135295868\n", - "Surface training t=399, loss=0.6776107251644135\n", - "Surface training t=400, loss=0.7664172351360321\n", - "Surface training t=401, loss=0.7433988153934479\n", - "Surface training t=402, loss=0.7445217669010162\n", - "Surface training t=403, loss=0.7104253172874451\n", - "Surface training t=404, loss=0.7556849718093872\n", - "Surface training t=405, loss=0.7634987235069275\n", - "Surface training t=406, loss=0.7595275938510895\n", - "Surface training t=407, loss=0.7288810014724731\n", - "Surface training t=408, loss=0.743641197681427\n", - "Surface training t=409, loss=0.7720043361186981\n", - "Surface training t=410, loss=0.7557145059108734\n", - "Surface training t=411, loss=0.6778195202350616\n", - "Surface training t=412, loss=0.731704443693161\n", - "Surface training t=413, loss=0.7612960934638977\n", - "Surface training t=414, loss=0.7778408527374268\n", - "Surface training t=415, loss=0.6806983053684235\n", - "Surface training t=416, loss=0.7315202951431274\n", - "Surface training t=417, loss=0.7587389945983887\n", - "Surface training t=418, loss=0.740537166595459\n", - "Surface training t=419, loss=0.7830256223678589\n", - "Surface training t=420, loss=0.7410067617893219\n", - "Surface training t=421, loss=0.701801210641861\n", - "Surface training t=422, loss=0.7950819730758667\n", - "Surface training t=423, loss=0.7037926912307739\n", - "Surface training t=424, loss=0.7361719608306885\n", - "Surface training t=425, loss=0.7449906170368195\n", - "Surface training t=426, loss=0.7571040987968445\n", - "Surface training t=427, loss=0.7441794574260712\n", - "Surface training t=428, loss=0.7765335142612457\n", - "Surface training t=429, loss=0.7477844059467316\n", - "Surface training t=430, loss=0.746534138917923\n", - "Surface training t=431, loss=0.7632835209369659\n", - "Surface training t=432, loss=0.7551207542419434\n", - "Surface training t=433, loss=0.7352462112903595\n", - "Surface training t=434, loss=0.7787033915519714\n", - "Surface training t=435, loss=0.740862101316452\n", - "Surface training t=436, loss=0.6915798485279083\n", - "Surface training t=437, loss=0.7670897245407104\n", - "Surface 
training t=438, loss=0.6802357733249664\n", - "Surface training t=439, loss=0.7218988835811615\n", - "Surface training t=440, loss=0.7605783641338348\n", - "Surface training t=441, loss=0.7339531481266022\n", - "Surface training t=442, loss=0.7462826073169708\n", - "Surface training t=443, loss=0.7196294665336609\n", - "Surface training t=444, loss=0.7065421044826508\n", - "Surface training t=445, loss=0.7406232059001923\n", - "Surface training t=446, loss=0.7209380567073822\n", - "Surface training t=447, loss=0.7605304718017578\n", - "Surface training t=448, loss=0.8049536943435669\n", - "Surface training t=449, loss=0.6899971961975098\n", - "Surface training t=450, loss=0.7651414573192596\n", - "Surface training t=451, loss=0.6831399202346802\n", - "Surface training t=452, loss=0.7278355658054352\n", - "Surface training t=453, loss=0.763862669467926\n", - "Surface training t=454, loss=0.7131136059761047\n", - "Surface training t=455, loss=0.6652188301086426\n", - "Surface training t=456, loss=0.7265937030315399\n", - "Surface training t=457, loss=0.7163914144039154\n", - "Surface training t=458, loss=0.7363368272781372\n", - "Surface training t=459, loss=0.7619440853595734\n", - "Surface training t=460, loss=0.7849406003952026\n", - "Surface training t=461, loss=0.761025071144104\n", - "Surface training t=462, loss=0.768541008234024\n", - "Surface training t=463, loss=0.7745031118392944\n", - "Surface training t=464, loss=0.7648779153823853\n", - "Surface training t=465, loss=0.7353124618530273\n", - "Surface training t=466, loss=0.7386506199836731\n", - "Surface training t=467, loss=0.7756676077842712\n", - "Surface training t=468, loss=0.7675487101078033\n", - "Surface training t=469, loss=0.7427074611186981\n", - "Surface training t=470, loss=0.733263373374939\n", - "Surface training t=471, loss=0.7315665185451508\n", - "Surface training t=472, loss=0.7460019886493683\n", - "Surface training t=473, loss=0.7664823234081268\n", - "Surface training t=474, loss=0.7148588001728058\n", - "Surface training t=475, loss=0.6961595118045807\n", - "Surface training t=476, loss=0.7603394389152527\n", - "Surface training t=477, loss=0.735195517539978\n", - "Surface training t=478, loss=0.687769889831543\n", - "Surface training t=479, loss=0.7450501620769501\n", - "Surface training t=480, loss=0.7445606291294098\n", - "Surface training t=481, loss=0.6638004183769226\n", - "Surface training t=482, loss=0.7673302888870239\n", - "Surface training t=483, loss=0.7024849355220795\n", - "Surface training t=484, loss=0.7528132498264313\n", - "Surface training t=485, loss=0.7616735696792603\n", - "Surface training t=486, loss=0.7683420181274414\n", - "Surface training t=487, loss=0.7430952191352844\n", - "Surface training t=488, loss=0.7528255879878998\n", - "Surface training t=489, loss=0.8291309773921967\n", - "Surface training t=490, loss=0.7286237478256226\n", - "Surface training t=491, loss=0.696842759847641\n", - "Surface training t=492, loss=0.744761735200882\n", - "Surface training t=493, loss=0.8391381204128265\n", - "Surface training t=494, loss=0.7562071979045868\n", - "Surface training t=495, loss=0.7820682525634766\n", - "Surface training t=496, loss=0.7160950303077698\n", - "Surface training t=497, loss=0.7489967942237854\n", - "Surface training t=498, loss=0.7511096298694611\n", - "Surface training t=499, loss=0.7161718904972076\n", - "Surface training t=500, loss=0.7435327470302582\n", - "Surface training t=501, loss=0.6752430200576782\n", - "Surface training t=502, 
loss=0.7289377152919769\n", - "Surface training t=503, loss=0.7398143708705902\n", - "Surface training t=504, loss=0.7572869062423706\n", - "Surface training t=505, loss=0.7390636503696442\n", - "Surface training t=506, loss=0.7293594479560852\n", - "Surface training t=507, loss=0.7396262288093567\n", - "Surface training t=508, loss=0.6592755019664764\n", - "Surface training t=509, loss=0.7389075458049774\n", - "Surface training t=510, loss=0.7587370872497559\n", - "Surface training t=511, loss=0.7715282142162323\n", - "Surface training t=512, loss=0.7712216973304749\n", - "Surface training t=513, loss=0.7448973059654236\n", - "Surface training t=514, loss=0.7488845586776733\n", - "Surface training t=515, loss=0.7625684440135956\n", - "Surface training t=516, loss=0.6990600228309631\n", - "Surface training t=517, loss=0.7258197665214539\n", - "Surface training t=518, loss=0.7379526793956757\n", - "Surface training t=519, loss=0.7275170683860779\n", - "Surface training t=520, loss=0.7592888474464417\n", - "Surface training t=521, loss=0.7214208245277405\n", - "Surface training t=522, loss=0.7130157947540283\n", - "Surface training t=523, loss=0.7186299562454224\n", - "Surface training t=524, loss=0.7217163443565369\n", - "Surface training t=525, loss=0.7225414216518402\n", - "Surface training t=526, loss=0.7205814123153687\n", - "Surface training t=527, loss=0.7652427554130554\n", - "Surface training t=528, loss=0.6987330913543701\n", - "Surface training t=529, loss=0.7290058135986328\n", - "Surface training t=530, loss=0.735487550497055\n", - "Surface training t=531, loss=0.7330231666564941\n", - "Surface training t=532, loss=0.7243134379386902\n", - "Surface training t=533, loss=0.7332590818405151\n", - "Surface training t=534, loss=0.748604953289032\n", - "Surface training t=535, loss=0.6924480199813843\n", - "Surface training t=536, loss=0.7261049449443817\n", - "Surface training t=537, loss=0.725191056728363\n", - "Surface training t=538, loss=0.7118787169456482\n", - "Surface training t=539, loss=0.7153495848178864\n", - "Surface training t=540, loss=0.6991517841815948\n", - "Surface training t=541, loss=0.7354009747505188\n", - "Surface training t=542, loss=0.7307234704494476\n", - "Surface training t=543, loss=0.7050522267818451\n", - "Surface training t=544, loss=0.7018854916095734\n", - "Surface training t=545, loss=0.7836989462375641\n", - "Surface training t=546, loss=0.7793920338153839\n", - "Surface training t=547, loss=0.7337549030780792\n", - "Surface training t=548, loss=0.7533702552318573\n", - "Surface training t=549, loss=0.7374051213264465\n", - "Surface training t=550, loss=0.824949324131012\n", - "Surface training t=551, loss=0.7381605803966522\n", - "Surface training t=552, loss=0.7142050266265869\n", - "Surface training t=553, loss=0.7268541753292084\n", - "Surface training t=554, loss=0.7443929016590118\n", - "Surface training t=555, loss=0.7250387966632843\n", - "Surface training t=556, loss=0.730885922908783\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=557, loss=0.7335785627365112\n", - "Surface training t=558, loss=0.7996465563774109\n", - "Surface training t=559, loss=0.7331822216510773\n", - "Surface training t=560, loss=0.7303139567375183\n", - "Surface training t=561, loss=0.753900557756424\n", - "Surface training t=562, loss=0.7240837812423706\n", - "Surface training t=563, loss=0.7379560172557831\n", - "Surface training t=564, loss=0.7360164523124695\n", - "Surface training t=565, 
loss=0.706916332244873\n", - "Surface training t=566, loss=0.7648134827613831\n", - "Surface training t=567, loss=0.6997851133346558\n", - "Surface training t=568, loss=0.6866654753684998\n", - "Surface training t=569, loss=0.7547954320907593\n", - "Surface training t=570, loss=0.6889091730117798\n", - "Surface training t=571, loss=0.7452556788921356\n", - "Surface training t=572, loss=0.7430039644241333\n", - "Surface training t=573, loss=0.7243649363517761\n", - "Surface training t=574, loss=0.7281731069087982\n", - "Surface training t=575, loss=0.6830784380435944\n", - "Surface training t=576, loss=0.7264013886451721\n", - "Surface training t=577, loss=0.7192461490631104\n", - "Surface training t=578, loss=0.7580182254314423\n", - "Surface training t=579, loss=0.7615674138069153\n", - "Surface training t=580, loss=0.7164229154586792\n", - "Surface training t=581, loss=0.6958622634410858\n", - "Surface training t=582, loss=0.7621951401233673\n", - "Surface training t=583, loss=0.7826796174049377\n", - "Surface training t=584, loss=0.7374458611011505\n", - "Surface training t=585, loss=0.7170924544334412\n", - "Surface training t=586, loss=0.7665317356586456\n", - "Surface training t=587, loss=0.7644174098968506\n", - "Surface training t=588, loss=0.717094749212265\n", - "Surface training t=589, loss=0.698008120059967\n", - "Surface training t=590, loss=0.7201486527919769\n", - "Surface training t=591, loss=0.7125809192657471\n", - "Surface training t=592, loss=0.6788913607597351\n", - "Surface training t=593, loss=0.7416555881500244\n", - "Surface training t=594, loss=0.7244059443473816\n", - "Surface training t=595, loss=0.8055439591407776\n", - "Surface training t=596, loss=0.7110392451286316\n", - "Surface training t=597, loss=0.7275465130805969\n", - "Surface training t=598, loss=0.7316097319126129\n", - "Surface training t=599, loss=0.6738485395908356\n", - "Surface training t=600, loss=0.7033663988113403\n", - "Surface training t=601, loss=0.6726416945457458\n", - "Surface training t=602, loss=0.6742091774940491\n", - "Surface training t=603, loss=0.7290011346340179\n", - "Surface training t=604, loss=0.73021200299263\n", - "Surface training t=605, loss=0.7320062816143036\n", - "Surface training t=606, loss=0.7162272334098816\n", - "Surface training t=607, loss=0.6814646124839783\n", - "Surface training t=608, loss=0.7287317514419556\n", - "Surface training t=609, loss=0.7109494805335999\n", - "Surface training t=610, loss=0.7013536989688873\n", - "Surface training t=611, loss=0.7124360799789429\n", - "Surface training t=612, loss=0.7302539646625519\n", - "Surface training t=613, loss=0.690718024969101\n", - "Surface training t=614, loss=0.7210116982460022\n", - "Surface training t=615, loss=0.7361367046833038\n", - "Surface training t=616, loss=0.7358748912811279\n", - "Surface training t=617, loss=0.7187643945217133\n", - "Surface training t=618, loss=0.7515898644924164\n", - "Surface training t=619, loss=0.7570104897022247\n", - "Surface training t=620, loss=0.7090416550636292\n", - "Surface training t=621, loss=0.7249105870723724\n", - "Surface training t=622, loss=0.7645576298236847\n", - "Surface training t=623, loss=0.7221427261829376\n", - "Surface training t=624, loss=0.7131045162677765\n", - "Surface training t=625, loss=0.7128370702266693\n", - "Surface training t=626, loss=0.7223873734474182\n", - "Surface training t=627, loss=0.6938134729862213\n", - "Surface training t=628, loss=0.7064409852027893\n", - "Surface training t=629, loss=0.6835471987724304\n", - "Surface 
training t=630, loss=0.6951909065246582\n", - "Surface training t=631, loss=0.7335845232009888\n", - "Surface training t=632, loss=0.7384417951107025\n", - "Surface training t=633, loss=0.7113054990768433\n", - "Surface training t=634, loss=0.7492238581180573\n", - "Surface training t=635, loss=0.7437246739864349\n", - "Surface training t=636, loss=0.7230762541294098\n", - "Surface training t=637, loss=0.6603690385818481\n", - "Surface training t=638, loss=0.7428089380264282\n", - "Surface training t=639, loss=0.6941562294960022\n", - "Surface training t=640, loss=0.7322643995285034\n", - "Surface training t=641, loss=0.7312476933002472\n", - "Surface training t=642, loss=0.6845727264881134\n", - "Surface training t=643, loss=0.7321513891220093\n", - "Surface training t=644, loss=0.7038001716136932\n", - "Surface training t=645, loss=0.7047080099582672\n", - "Surface training t=646, loss=0.6961954534053802\n", - "Surface training t=647, loss=0.6955245137214661\n", - "Surface training t=648, loss=0.706539511680603\n", - "Surface training t=649, loss=0.7325050532817841\n", - "Surface training t=650, loss=0.6885095536708832\n", - "Surface training t=651, loss=0.7163238823413849\n", - "Surface training t=652, loss=0.7262755334377289\n", - "Surface training t=653, loss=0.7317382991313934\n", - "Surface training t=654, loss=0.7233975827693939\n", - "Surface training t=655, loss=0.7682432532310486\n", - "Surface training t=656, loss=0.7688924968242645\n", - "Surface training t=657, loss=0.7178004682064056\n", - "Surface training t=658, loss=0.767298012971878\n", - "Surface training t=659, loss=0.7156640589237213\n", - "Surface training t=660, loss=0.7258177101612091\n", - "Surface training t=661, loss=0.7761835157871246\n", - "Surface training t=662, loss=0.7172451317310333\n", - "Surface training t=663, loss=0.6390997469425201\n", - "Surface training t=664, loss=0.6588055193424225\n", - "Surface training t=665, loss=0.7361533045768738\n", - "Surface training t=666, loss=0.6896987855434418\n", - "Surface training t=667, loss=0.709961324930191\n", - "Surface training t=668, loss=0.737312912940979\n", - "Surface training t=669, loss=0.6744971871376038\n", - "Surface training t=670, loss=0.716353565454483\n", - "Surface training t=671, loss=0.7372320592403412\n", - "Surface training t=672, loss=0.7279776334762573\n", - "Surface training t=673, loss=0.7383294403553009\n", - "Surface training t=674, loss=0.7921074628829956\n", - "Surface training t=675, loss=0.730728268623352\n", - "Surface training t=676, loss=0.7511984407901764\n", - "Surface training t=677, loss=0.7362821698188782\n", - "Surface training t=678, loss=0.7010192573070526\n", - "Surface training t=679, loss=0.7068208158016205\n", - "Surface training t=680, loss=0.7257193028926849\n", - "Surface training t=681, loss=0.7409135401248932\n", - "Surface training t=682, loss=0.6866344213485718\n", - "Surface training t=683, loss=0.6568992733955383\n", - "Surface training t=684, loss=0.7377621531486511\n", - "Surface training t=685, loss=0.7308855950832367\n", - "Surface training t=686, loss=0.7028296291828156\n", - "Surface training t=687, loss=0.7057842314243317\n", - "Surface training t=688, loss=0.7171322405338287\n", - "Surface training t=689, loss=0.7264114320278168\n", - "Surface training t=690, loss=0.7198080122470856\n", - "Surface training t=691, loss=0.7055399715900421\n", - "Surface training t=692, loss=0.6699729263782501\n", - "Surface training t=693, loss=0.7051178216934204\n", - "Surface training t=694, 
loss=0.7171212732791901\n", - "Surface training t=695, loss=0.7367691397666931\n", - "Surface training t=696, loss=0.697495847940445\n", - "Surface training t=697, loss=0.7199646234512329\n", - "Surface training t=698, loss=0.7132418751716614\n", - "Surface training t=699, loss=0.697679728269577\n", - "Surface training t=700, loss=0.7474850416183472\n", - "Surface training t=701, loss=0.7306180894374847\n", - "Surface training t=702, loss=0.7097681760787964\n", - "Surface training t=703, loss=0.6825599670410156\n", - "Surface training t=704, loss=0.717701643705368\n", - "Surface training t=705, loss=0.6790167093276978\n", - "Surface training t=706, loss=0.7094304859638214\n", - "Surface training t=707, loss=0.7313178181648254\n", - "Surface training t=708, loss=0.6945205330848694\n", - "Surface training t=709, loss=0.7151888608932495\n", - "Surface training t=710, loss=0.6610653698444366\n", - "Surface training t=711, loss=0.7039419114589691\n", - "Surface training t=712, loss=0.7051483988761902\n", - "Surface training t=713, loss=0.7137382924556732\n", - "Surface training t=714, loss=0.6875665485858917\n", - "Surface training t=715, loss=0.6943468451499939\n", - "Surface training t=716, loss=0.7512934803962708\n", - "Surface training t=717, loss=0.6737077534198761\n", - "Surface training t=718, loss=0.6954838633537292\n", - "Surface training t=719, loss=0.7048223316669464\n", - "Surface training t=720, loss=0.7260208427906036\n", - "Surface training t=721, loss=0.7660005390644073\n", - "Surface training t=722, loss=0.6920788288116455\n", - "Surface training t=723, loss=0.7140858471393585\n", - "Surface training t=724, loss=0.6916328966617584\n", - "Surface training t=725, loss=0.730443149805069\n", - "Surface training t=726, loss=0.7407736480236053\n", - "Surface training t=727, loss=0.7278604507446289\n", - "Surface training t=728, loss=0.748895913362503\n", - "Surface training t=729, loss=0.7268277108669281\n", - "Surface training t=730, loss=0.7223813831806183\n", - "Surface training t=731, loss=0.7281583845615387\n", - "Surface training t=732, loss=0.7131029367446899\n", - "Surface training t=733, loss=0.7125158607959747\n", - "Surface training t=734, loss=0.7400671243667603\n", - "Surface training t=735, loss=0.639182448387146\n", - "Surface training t=736, loss=0.7192652523517609\n", - "Surface training t=737, loss=0.7149066627025604\n", - "Surface training t=738, loss=0.7221704423427582\n", - "Surface training t=739, loss=0.7272633016109467\n", - "Surface training t=740, loss=0.7220409214496613\n", - "Surface training t=741, loss=0.7429201602935791\n", - "Surface training t=742, loss=0.7199559807777405\n", - "Surface training t=743, loss=0.7195783257484436\n", - "Surface training t=744, loss=0.6840713620185852\n", - "Surface training t=745, loss=0.6771870255470276\n", - "Surface training t=746, loss=0.6628480851650238\n", - "Surface training t=747, loss=0.7060926258563995\n", - "Surface training t=748, loss=0.7025270760059357\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=749, loss=0.7543063461780548\n", - "Surface training t=750, loss=0.7194603085517883\n", - "Surface training t=751, loss=0.7463642358779907\n", - "Surface training t=752, loss=0.7197608053684235\n", - "Surface training t=753, loss=0.6490496098995209\n", - "Surface training t=754, loss=0.718960702419281\n", - "Surface training t=755, loss=0.6767506003379822\n", - "Surface training t=756, loss=0.7567952573299408\n", - "Surface training t=757, 
loss=0.6940947473049164\n", - "Surface training t=758, loss=0.7004585564136505\n", - "Surface training t=759, loss=0.731511265039444\n", - "Surface training t=760, loss=0.719746470451355\n", - "Surface training t=761, loss=0.7286590337753296\n", - "Surface training t=762, loss=0.6941852271556854\n", - "Surface training t=763, loss=0.713170737028122\n", - "Surface training t=764, loss=0.6698143482208252\n", - "Surface training t=765, loss=0.7449012100696564\n", - "Surface training t=766, loss=0.6817027926445007\n", - "Surface training t=767, loss=0.7364096641540527\n", - "Surface training t=768, loss=0.7369137108325958\n", - "Surface training t=769, loss=0.745562732219696\n", - "Surface training t=770, loss=0.6996444463729858\n", - "Surface training t=771, loss=0.7023485600948334\n", - "Surface training t=772, loss=0.7252152562141418\n", - "Surface training t=773, loss=0.6606780588626862\n", - "Surface training t=774, loss=0.681865394115448\n", - "Surface training t=775, loss=0.7005645930767059\n", - "Surface training t=776, loss=0.7160609662532806\n", - "Surface training t=777, loss=0.71548131108284\n", - "Surface training t=778, loss=0.7027323544025421\n", - "Surface training t=779, loss=0.7379911243915558\n", - "Surface training t=780, loss=0.6856720447540283\n", - "Surface training t=781, loss=0.7205274105072021\n", - "Surface training t=782, loss=0.6930259168148041\n", - "Surface training t=783, loss=0.6658027768135071\n", - "Surface training t=784, loss=0.7316795885562897\n", - "Surface training t=785, loss=0.7181362509727478\n", - "Surface training t=786, loss=0.7581804096698761\n", - "Surface training t=787, loss=0.7099959850311279\n", - "Surface training t=788, loss=0.6916402280330658\n", - "Surface training t=789, loss=0.7271737158298492\n", - "Surface training t=790, loss=0.6979239583015442\n", - "Surface training t=791, loss=0.7355188131332397\n", - "Surface training t=792, loss=0.7156620025634766\n", - "Surface training t=793, loss=0.6925897002220154\n", - "Surface training t=794, loss=0.7156149446964264\n", - "Surface training t=795, loss=0.7433619797229767\n", - "Surface training t=796, loss=0.6343139111995697\n", - "Surface training t=797, loss=0.6958023607730865\n", - "Surface training t=798, loss=0.6969950497150421\n", - "Surface training t=799, loss=0.7590940594673157\n", - "Surface training t=800, loss=0.6678725481033325\n", - "Surface training t=801, loss=0.7231174111366272\n", - "Surface training t=802, loss=0.7144701182842255\n", - "Surface training t=803, loss=0.7173844277858734\n", - "Surface training t=804, loss=0.7063211798667908\n", - "Surface training t=805, loss=0.6428735554218292\n", - "Surface training t=806, loss=0.7223107516765594\n", - "Surface training t=807, loss=0.721981018781662\n", - "Surface training t=808, loss=0.7050890326499939\n", - "Surface training t=809, loss=0.6994339823722839\n", - "Surface training t=810, loss=0.6763952672481537\n", - "Surface training t=811, loss=0.6902655363082886\n", - "Surface training t=812, loss=0.6973614692687988\n", - "Surface training t=813, loss=0.6612869501113892\n", - "Surface training t=814, loss=0.6915771663188934\n", - "Surface training t=815, loss=0.7157719433307648\n", - "Surface training t=816, loss=0.6702070832252502\n", - "Surface training t=817, loss=0.7414868175983429\n", - "Surface training t=818, loss=0.6885150969028473\n", - "Surface training t=819, loss=0.698404461145401\n", - "Surface training t=820, loss=0.7460609674453735\n", - "Surface training t=821, loss=0.7328645884990692\n", - "Surface 
training t=822, loss=0.7464238405227661\n", - "Surface training t=823, loss=0.6737407147884369\n", - "Surface training t=824, loss=0.7445998787879944\n", - "Surface training t=825, loss=0.6772380769252777\n", - "Surface training t=826, loss=0.7257062196731567\n", - "Surface training t=827, loss=0.6895685195922852\n", - "Surface training t=828, loss=0.7211990654468536\n", - "Surface training t=829, loss=0.7248699069023132\n", - "Surface training t=830, loss=0.6858735978603363\n", - "Surface training t=831, loss=0.6456812024116516\n", - "Surface training t=832, loss=0.6741281747817993\n", - "Surface training t=833, loss=0.7278043329715729\n", - "Surface training t=834, loss=0.6989783644676208\n", - "Surface training t=835, loss=0.6860463619232178\n", - "Surface training t=836, loss=0.722197949886322\n", - "Surface training t=837, loss=0.6716084480285645\n", - "Surface training t=838, loss=0.709700733423233\n", - "Surface training t=839, loss=0.6583209931850433\n", - "Surface training t=840, loss=0.7345349490642548\n", - "Surface training t=841, loss=0.7042964100837708\n", - "Surface training t=842, loss=0.7051421403884888\n", - "Surface training t=843, loss=0.7610217928886414\n", - "Surface training t=844, loss=0.7213274538516998\n", - "Surface training t=845, loss=0.6770359873771667\n", - "Surface training t=846, loss=0.6838371455669403\n", - "Surface training t=847, loss=0.699505478143692\n", - "Surface training t=848, loss=0.6936376094818115\n", - "Surface training t=849, loss=0.7039848566055298\n", - "Surface training t=850, loss=0.6554263234138489\n", - "Surface training t=851, loss=0.6247846633195877\n", - "Surface training t=852, loss=0.7072729766368866\n", - "Surface training t=853, loss=0.7030320465564728\n", - "Surface training t=854, loss=0.7297440767288208\n", - "Surface training t=855, loss=0.6974423825740814\n", - "Surface training t=856, loss=0.7115783393383026\n", - "Surface training t=857, loss=0.7018674314022064\n", - "Surface training t=858, loss=0.7260897755622864\n", - "Surface training t=859, loss=0.6732245981693268\n", - "Surface training t=860, loss=0.6242143362760544\n", - "Surface training t=861, loss=0.685344398021698\n", - "Surface training t=862, loss=0.6662025153636932\n", - "Surface training t=863, loss=0.7217663526535034\n", - "Surface training t=864, loss=0.6722208559513092\n", - "Surface training t=865, loss=0.7082835137844086\n", - "Surface training t=866, loss=0.6739883124828339\n", - "Surface training t=867, loss=0.6657216846942902\n", - "Surface training t=868, loss=0.6877162754535675\n", - "Surface training t=869, loss=0.661388099193573\n", - "Surface training t=870, loss=0.6886064410209656\n", - "Surface training t=871, loss=0.6927618980407715\n", - "Surface training t=872, loss=0.7046584486961365\n", - "Surface training t=873, loss=0.7365764379501343\n", - "Surface training t=874, loss=0.6816737055778503\n", - "Surface training t=875, loss=0.714830070734024\n", - "Surface training t=876, loss=0.7367116212844849\n", - "Surface training t=877, loss=0.7114833295345306\n", - "Surface training t=878, loss=0.7035405933856964\n", - "Surface training t=879, loss=0.7049341201782227\n", - "Surface training t=880, loss=0.7019051313400269\n", - "Surface training t=881, loss=0.7082690000534058\n", - "Surface training t=882, loss=0.7504141926765442\n", - "Surface training t=883, loss=0.7663953006267548\n", - "Surface training t=884, loss=0.6959900856018066\n", - "Surface training t=885, loss=0.6613894999027252\n", - "Surface training t=886, 
loss=0.7334234714508057\n", - "Surface training t=887, loss=0.7444504499435425\n", - "Surface training t=888, loss=0.7145305573940277\n", - "Surface training t=889, loss=0.724106639623642\n", - "Surface training t=890, loss=0.6573009788990021\n", - "Surface training t=891, loss=0.7044869065284729\n", - "Surface training t=892, loss=0.6710876226425171\n", - "Surface training t=893, loss=0.7252714335918427\n", - "Surface training t=894, loss=0.7536189556121826\n", - "Surface training t=895, loss=0.717133104801178\n", - "Surface training t=896, loss=0.7106867134571075\n", - "Surface training t=897, loss=0.6434835195541382\n", - "Surface training t=898, loss=0.6709438264369965\n", - "Surface training t=899, loss=0.7122803926467896\n", - "Surface training t=900, loss=0.7295309007167816\n", - "Surface training t=901, loss=0.6276375651359558\n", - "Surface training t=902, loss=0.6939356923103333\n", - "Surface training t=903, loss=0.6621975302696228\n", - "Surface training t=904, loss=0.726634681224823\n", - "Surface training t=905, loss=0.7003622949123383\n", - "Surface training t=906, loss=0.6597552597522736\n", - "Surface training t=907, loss=0.6949501931667328\n", - "Surface training t=908, loss=0.7264885008335114\n", - "Surface training t=909, loss=0.6257642507553101\n", - "Surface training t=910, loss=0.6359628140926361\n", - "Surface training t=911, loss=0.7042997181415558\n", - "Surface training t=912, loss=0.7134723663330078\n", - "Surface training t=913, loss=0.7309727966785431\n", - "Surface training t=914, loss=0.6972938179969788\n", - "Surface training t=915, loss=0.6806232631206512\n", - "Surface training t=916, loss=0.6505230367183685\n", - "Surface training t=917, loss=0.7306593954563141\n", - "Surface training t=918, loss=0.6778813898563385\n", - "Surface training t=919, loss=0.6735821068286896\n", - "Surface training t=920, loss=0.7467707097530365\n", - "Surface training t=921, loss=0.6429785490036011\n", - "Surface training t=922, loss=0.6966996192932129\n", - "Surface training t=923, loss=0.6880089938640594\n", - "Surface training t=924, loss=0.6997828483581543\n", - "Surface training t=925, loss=0.6953350603580475\n", - "Surface training t=926, loss=0.6392444670200348\n", - "Surface training t=927, loss=0.6705161929130554\n", - "Surface training t=928, loss=0.7205768525600433\n", - "Surface training t=929, loss=0.7035701274871826\n", - "Surface training t=930, loss=0.6975596845149994\n", - "Surface training t=931, loss=0.7066766917705536\n", - "Surface training t=932, loss=0.6730194091796875\n", - "Surface training t=933, loss=0.7085219323635101\n", - "Surface training t=934, loss=0.7065645754337311\n", - "Surface training t=935, loss=0.7064596116542816\n", - "Surface training t=936, loss=0.7196711599826813\n", - "Surface training t=937, loss=0.7117655575275421\n", - "Surface training t=938, loss=0.6817173361778259\n", - "Surface training t=939, loss=0.6831760704517365\n", - "Surface training t=940, loss=0.7369321882724762\n", - "Surface training t=941, loss=0.733904242515564\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=942, loss=0.6883070468902588\n", - "Surface training t=943, loss=0.7097288966178894\n", - "Surface training t=944, loss=0.6895450353622437\n", - "Surface training t=945, loss=0.6643058657646179\n", - "Surface training t=946, loss=0.7045563459396362\n", - "Surface training t=947, loss=0.7344096302986145\n", - "Surface training t=948, loss=0.7063700556755066\n", - "Surface training t=949, 
loss=0.7125669717788696\n", - "Surface training t=950, loss=0.6978897750377655\n", - "Surface training t=951, loss=0.7154608368873596\n", - "Surface training t=952, loss=0.6837054193019867\n", - "Surface training t=953, loss=0.736769437789917\n", - "Surface training t=954, loss=0.702972948551178\n", - "Surface training t=955, loss=0.6433483064174652\n", - "Surface training t=956, loss=0.6924918293952942\n", - "Surface training t=957, loss=0.7022729218006134\n", - "Surface training t=958, loss=0.6861046850681305\n", - "Surface training t=959, loss=0.6925418674945831\n", - "Surface training t=960, loss=0.7304800748825073\n", - "Surface training t=961, loss=0.6652023792266846\n", - "Surface training t=962, loss=0.6501529514789581\n", - "Surface training t=963, loss=0.7224452793598175\n", - "Surface training t=964, loss=0.7229047119617462\n", - "Surface training t=965, loss=0.7302038967609406\n", - "Surface training t=966, loss=0.6698238253593445\n", - "Surface training t=967, loss=0.7042911350727081\n", - "Surface training t=968, loss=0.6913245618343353\n", - "Surface training t=969, loss=0.6765964925289154\n", - "Surface training t=970, loss=0.7049328684806824\n", - "Surface training t=971, loss=0.6676728129386902\n", - "Surface training t=972, loss=0.6922657191753387\n", - "Surface training t=973, loss=0.6894004344940186\n", - "Surface training t=974, loss=0.7228500545024872\n", - "Surface training t=975, loss=0.7497037351131439\n", - "Surface training t=976, loss=0.7136208415031433\n", - "Surface training t=977, loss=0.6763185262680054\n", - "Surface training t=978, loss=0.726411908864975\n", - "Surface training t=979, loss=0.7041929364204407\n", - "Surface training t=980, loss=0.6936042904853821\n", - "Surface training t=981, loss=0.7127459645271301\n", - "Surface training t=982, loss=0.6977021396160126\n", - "Surface training t=983, loss=0.7019914090633392\n", - "Surface training t=984, loss=0.7036579549312592\n", - "Surface training t=985, loss=0.6820262968540192\n", - "Surface training t=986, loss=0.6903435587882996\n", - "Surface training t=987, loss=0.6731123626232147\n", - "Surface training t=988, loss=0.6699405908584595\n", - "Surface training t=989, loss=0.6927416026592255\n", - "Surface training t=990, loss=0.6640583574771881\n", - "Surface training t=991, loss=0.7109065651893616\n", - "Surface training t=992, loss=0.7418050765991211\n", - "Surface training t=993, loss=0.6721242666244507\n", - "Surface training t=994, loss=0.6950087547302246\n", - "Surface training t=995, loss=0.6750801503658295\n", - "Surface training t=996, loss=0.6377302408218384\n", - "Surface training t=997, loss=0.7425239384174347\n", - "Surface training t=998, loss=0.7111711800098419\n", - "Surface training t=999, loss=0.6869493424892426\n", - "Surface training t=1000, loss=0.7180209755897522\n", - "Surface training t=1001, loss=0.7055557072162628\n", - "Surface training t=1002, loss=0.6950848400592804\n", - "Surface training t=1003, loss=0.6584821939468384\n", - "Surface training t=1004, loss=0.7264292240142822\n", - "Surface training t=1005, loss=0.6788455545902252\n", - "Surface training t=1006, loss=0.6448472738265991\n", - "Surface training t=1007, loss=0.6812686324119568\n", - "Surface training t=1008, loss=0.6958890855312347\n", - "Surface training t=1009, loss=0.678543895483017\n", - "Surface training t=1010, loss=0.718345046043396\n", - "Surface training t=1011, loss=0.6247154176235199\n", - "Surface training t=1012, loss=0.6820748150348663\n", - "Surface training t=1013, 
loss=0.6850292086601257\n", - "Surface training t=1014, loss=0.682645708322525\n", - "Surface training t=1015, loss=0.6577948033809662\n", - "Surface training t=1016, loss=0.6906583607196808\n", - "Surface training t=1017, loss=0.7317796945571899\n", - "Surface training t=1018, loss=0.7157589793205261\n", - "Surface training t=1019, loss=0.6833800077438354\n", - "Surface training t=1020, loss=0.6934943199157715\n", - "Surface training t=1021, loss=0.724451333284378\n", - "Surface training t=1022, loss=0.6733899116516113\n", - "Surface training t=1023, loss=0.7203476428985596\n", - "Surface training t=1024, loss=0.6905409693717957\n", - "Surface training t=1025, loss=0.6703505218029022\n", - "Surface training t=1026, loss=0.684205025434494\n", - "Surface training t=1027, loss=0.7012571692466736\n", - "Surface training t=1028, loss=0.7501386404037476\n", - "Surface training t=1029, loss=0.6951741576194763\n", - "Surface training t=1030, loss=0.7127212285995483\n", - "Surface training t=1031, loss=0.6923122107982635\n", - "Surface training t=1032, loss=0.7006103992462158\n", - "Surface training t=1033, loss=0.7424720525741577\n", - "Surface training t=1034, loss=0.6735334694385529\n", - "Surface training t=1035, loss=0.6910350620746613\n", - "Surface training t=1036, loss=0.6839320659637451\n", - "Surface training t=1037, loss=0.6575536131858826\n", - "Surface training t=1038, loss=0.6720835864543915\n", - "Surface training t=1039, loss=0.7692006826400757\n", - "Surface training t=1040, loss=0.6397226452827454\n", - "Surface training t=1041, loss=0.6778115630149841\n", - "Surface training t=1042, loss=0.6932428479194641\n", - "Surface training t=1043, loss=0.7050404250621796\n", - "Surface training t=1044, loss=0.6900419294834137\n", - "Surface training t=1045, loss=0.705246090888977\n", - "Surface training t=1046, loss=0.6574556231498718\n", - "Surface training t=1047, loss=0.6896183490753174\n", - "Surface training t=1048, loss=0.6849222183227539\n", - "Surface training t=1049, loss=0.6790200173854828\n", - "Surface training t=1050, loss=0.7250482439994812\n", - "Surface training t=1051, loss=0.6751938462257385\n", - "Surface training t=1052, loss=0.6538428068161011\n", - "Surface training t=1053, loss=0.6833003461360931\n", - "Surface training t=1054, loss=0.6630050837993622\n", - "Surface training t=1055, loss=0.6859205961227417\n", - "Surface training t=1056, loss=0.7172695100307465\n", - "Surface training t=1057, loss=0.6644984781742096\n", - "Surface training t=1058, loss=0.6877329349517822\n", - "Surface training t=1059, loss=0.6777876913547516\n", - "Surface training t=1060, loss=0.6717898845672607\n", - "Surface training t=1061, loss=0.6394211053848267\n", - "Surface training t=1062, loss=0.6683301627635956\n", - "Surface training t=1063, loss=0.6787392795085907\n", - "Surface training t=1064, loss=0.6573807597160339\n", - "Surface training t=1065, loss=0.6995311677455902\n", - "Surface training t=1066, loss=0.7170377373695374\n", - "Surface training t=1067, loss=0.675233006477356\n", - "Surface training t=1068, loss=0.6754898130893707\n", - "Surface training t=1069, loss=0.6852890849113464\n", - "Surface training t=1070, loss=0.6326330304145813\n", - "Surface training t=1071, loss=0.6960288882255554\n", - "Surface training t=1072, loss=0.6929100751876831\n", - "Surface training t=1073, loss=0.6749621629714966\n", - "Surface training t=1074, loss=0.6524979174137115\n", - "Surface training t=1075, loss=0.6650071144104004\n", - "Surface training t=1076, loss=0.6878277063369751\n", - 
"Surface training t=1077, loss=0.6175493001937866\n", - "Surface training t=1078, loss=0.6348842084407806\n", - "Surface training t=1079, loss=0.6711897253990173\n", - "Surface training t=1080, loss=0.6558950841426849\n", - "Surface training t=1081, loss=0.6567469835281372\n", - "Surface training t=1082, loss=0.6420493125915527\n", - "Surface training t=1083, loss=0.715196281671524\n", - "Surface training t=1084, loss=0.6296580135822296\n", - "Surface training t=1085, loss=0.6504335105419159\n", - "Surface training t=1086, loss=0.6771742403507233\n", - "Surface training t=1087, loss=0.6978764832019806\n", - "Surface training t=1088, loss=0.6689033508300781\n", - "Surface training t=1089, loss=0.6426859200000763\n", - "Surface training t=1090, loss=0.7082830667495728\n", - "Surface training t=1091, loss=0.6792106926441193\n", - "Surface training t=1092, loss=0.6504223346710205\n", - "Surface training t=1093, loss=0.709683895111084\n", - "Surface training t=1094, loss=0.7190245985984802\n", - "Surface training t=1095, loss=0.6753195524215698\n", - "Surface training t=1096, loss=0.6548386216163635\n", - "Surface training t=1097, loss=0.7234072387218475\n", - "Surface training t=1098, loss=0.7324091494083405\n", - "Surface training t=1099, loss=0.6913995742797852\n", - "Surface training t=1100, loss=0.6487596333026886\n", - "Surface training t=1101, loss=0.6640420258045197\n", - "Surface training t=1102, loss=0.6729129552841187\n", - "Surface training t=1103, loss=0.6575277149677277\n", - "Surface training t=1104, loss=0.7010212242603302\n", - "Surface training t=1105, loss=0.6671848297119141\n", - "Surface training t=1106, loss=0.6866084635257721\n", - "Surface training t=1107, loss=0.6665715873241425\n", - "Surface training t=1108, loss=0.6222450435161591\n", - "Surface training t=1109, loss=0.6572255492210388\n", - "Surface training t=1110, loss=0.6247903406620026\n", - "Surface training t=1111, loss=0.6860941648483276\n", - "Surface training t=1112, loss=0.6846833229064941\n", - "Surface training t=1113, loss=0.6997367441654205\n", - "Surface training t=1114, loss=0.6372667551040649\n", - "Surface training t=1115, loss=0.705870509147644\n", - "Surface training t=1116, loss=0.6891512274742126\n", - "Surface training t=1117, loss=0.6580718457698822\n", - "Surface training t=1118, loss=0.6836693286895752\n", - "Surface training t=1119, loss=0.6635927557945251\n", - "Surface training t=1120, loss=0.671014815568924\n", - "Surface training t=1121, loss=0.643324226140976\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=1122, loss=0.7075830101966858\n", - "Surface training t=1123, loss=0.6628279685974121\n", - "Surface training t=1124, loss=0.6975196897983551\n", - "Surface training t=1125, loss=0.7008666396141052\n", - "Surface training t=1126, loss=0.6622483730316162\n", - "Surface training t=1127, loss=0.7012901306152344\n", - "Surface training t=1128, loss=0.7001358866691589\n", - "Surface training t=1129, loss=0.694210022687912\n", - "Surface training t=1130, loss=0.6750964522361755\n", - "Surface training t=1131, loss=0.7082045078277588\n", - "Surface training t=1132, loss=0.6631987690925598\n", - "Surface training t=1133, loss=0.655221700668335\n", - "Surface training t=1134, loss=0.6902758777141571\n", - "Surface training t=1135, loss=0.6910734474658966\n", - "Surface training t=1136, loss=0.6668674051761627\n", - "Surface training t=1137, loss=0.7298831939697266\n", - "Surface training t=1138, loss=0.7263806760311127\n", - "Surface training 
t=1139, loss=0.651524007320404\n", - "Surface training t=1140, loss=0.7008337676525116\n", - "Surface training t=1141, loss=0.6203329265117645\n", - "Surface training t=1142, loss=0.6630546450614929\n", - "Surface training t=1143, loss=0.6686806082725525\n", - "Surface training t=1144, loss=0.6528687179088593\n", - "Surface training t=1145, loss=0.7186869084835052\n", - "Surface training t=1146, loss=0.6973227262496948\n", - "Surface training t=1147, loss=0.6867348253726959\n", - "Surface training t=1148, loss=0.6844745576381683\n", - "Surface training t=1149, loss=0.6696422398090363\n", - "Surface training t=1150, loss=0.6601164937019348\n", - "Surface training t=1151, loss=0.6384834945201874\n", - "Surface training t=1152, loss=0.6620150804519653\n", - "Surface training t=1153, loss=0.6510109603404999\n", - "Surface training t=1154, loss=0.6617465019226074\n", - "Surface training t=1155, loss=0.6885905265808105\n", - "Surface training t=1156, loss=0.6939286589622498\n", - "Surface training t=1157, loss=0.6461641490459442\n", - "Surface training t=1158, loss=0.6386644542217255\n", - "Surface training t=1159, loss=0.6656429469585419\n", - "Surface training t=1160, loss=0.6572025716304779\n", - "Surface training t=1161, loss=0.7286104559898376\n", - "Surface training t=1162, loss=0.6540512144565582\n", - "Surface training t=1163, loss=0.686859518289566\n", - "Surface training t=1164, loss=0.6783392131328583\n", - "Surface training t=1165, loss=0.6947443187236786\n", - "Surface training t=1166, loss=0.6880854666233063\n", - "Surface training t=1167, loss=0.666443943977356\n", - "Surface training t=1168, loss=0.6729408204555511\n", - "Surface training t=1169, loss=0.6639948785305023\n", - "Surface training t=1170, loss=0.6917353272438049\n", - "Surface training t=1171, loss=0.7110790312290192\n", - "Surface training t=1172, loss=0.6686237454414368\n", - "Surface training t=1173, loss=0.650213897228241\n", - "Surface training t=1174, loss=0.6909910440444946\n", - "Surface training t=1175, loss=0.6927328407764435\n", - "Surface training t=1176, loss=0.6307791471481323\n", - "Surface training t=1177, loss=0.7006763815879822\n", - "Surface training t=1178, loss=0.6862099468708038\n", - "Surface training t=1179, loss=0.7247652411460876\n", - "Surface training t=1180, loss=0.669969916343689\n", - "Surface training t=1181, loss=0.6664757430553436\n", - "Surface training t=1182, loss=0.655024379491806\n", - "Surface training t=1183, loss=0.6919271647930145\n", - "Surface training t=1184, loss=0.6393730938434601\n", - "Surface training t=1185, loss=0.6812922954559326\n", - "Surface training t=1186, loss=0.7157495319843292\n", - "Surface training t=1187, loss=0.6431835889816284\n", - "Surface training t=1188, loss=0.6571968495845795\n", - "Surface training t=1189, loss=0.5983819514513016\n", - "Surface training t=1190, loss=0.663386881351471\n", - "Surface training t=1191, loss=0.641249269247055\n", - "Surface training t=1192, loss=0.6606305837631226\n", - "Surface training t=1193, loss=0.6417627930641174\n", - "Surface training t=1194, loss=0.6522400081157684\n", - "Surface training t=1195, loss=0.6772884726524353\n", - "Surface training t=1196, loss=0.6735355854034424\n", - "Surface training t=1197, loss=0.6706971824169159\n", - "Surface training t=1198, loss=0.6860787272453308\n", - "Surface training t=1199, loss=0.6616505682468414\n", - "Surface training t=1200, loss=0.6307619214057922\n", - "Surface training t=1201, loss=0.6663891971111298\n", - "Surface training t=1202, 
loss=0.6584620177745819\n", - "Surface training t=1203, loss=0.6446681618690491\n", - "Surface training t=1204, loss=0.6522848010063171\n", - "Surface training t=1205, loss=0.687468945980072\n", - "Surface training t=1206, loss=0.6709845960140228\n", - "Surface training t=1207, loss=0.7025942802429199\n", - "Surface training t=1208, loss=0.599884957075119\n", - "Surface training t=1209, loss=0.7120988070964813\n", - "Surface training t=1210, loss=0.6340690553188324\n", - "Surface training t=1211, loss=0.6836814880371094\n", - "Surface training t=1212, loss=0.6305160820484161\n", - "Surface training t=1213, loss=0.6400548815727234\n", - "Surface training t=1214, loss=0.6831071674823761\n", - "Surface training t=1215, loss=0.6458635628223419\n", - "Surface training t=1216, loss=0.6270315945148468\n", - "Surface training t=1217, loss=0.6234356164932251\n", - "Surface training t=1218, loss=0.6760910153388977\n", - "Surface training t=1219, loss=0.6395479440689087\n", - "Surface training t=1220, loss=0.663374662399292\n", - "Surface training t=1221, loss=0.653458833694458\n", - "Surface training t=1222, loss=0.6732891201972961\n", - "Surface training t=1223, loss=0.6670283675193787\n", - "Surface training t=1224, loss=0.5784599781036377\n", - "Surface training t=1225, loss=0.6835560202598572\n", - "Surface training t=1226, loss=0.6502988040447235\n", - "Surface training t=1227, loss=0.6670026481151581\n", - "Surface training t=1228, loss=0.6321229338645935\n", - "Surface training t=1229, loss=0.6318168640136719\n", - "Surface training t=1230, loss=0.6569034457206726\n", - "Surface training t=1231, loss=0.7115351557731628\n", - "Surface training t=1232, loss=0.607630580663681\n", - "Surface training t=1233, loss=0.6395075917243958\n", - "Surface training t=1234, loss=0.7037356197834015\n", - "Surface training t=1235, loss=0.631188690662384\n", - "Surface training t=1236, loss=0.6541767418384552\n", - "Surface training t=1237, loss=0.6619177758693695\n", - "Surface training t=1238, loss=0.6467519700527191\n", - "Surface training t=1239, loss=0.6934819519519806\n", - "Surface training t=1240, loss=0.672546923160553\n", - "Surface training t=1241, loss=0.6248534023761749\n", - "Surface training t=1242, loss=0.5748581141233444\n", - "Surface training t=1243, loss=0.6615704894065857\n", - "Surface training t=1244, loss=0.6432574689388275\n", - "Surface training t=1245, loss=0.660497784614563\n", - "Surface training t=1246, loss=0.6663821339607239\n", - "Surface training t=1247, loss=0.6371552050113678\n", - "Surface training t=1248, loss=0.6792635917663574\n", - "Surface training t=1249, loss=0.6389552056789398\n", - "Surface training t=1250, loss=0.6396661698818207\n", - "Surface training t=1251, loss=0.6745524406433105\n", - "Surface training t=1252, loss=0.5947019308805466\n", - "Surface training t=1253, loss=0.6591916084289551\n", - "Surface training t=1254, loss=0.6936317384243011\n", - "Surface training t=1255, loss=0.6604065597057343\n", - "Surface training t=1256, loss=0.7030053436756134\n", - "Surface training t=1257, loss=0.5951824188232422\n", - "Surface training t=1258, loss=0.6606370210647583\n", - "Surface training t=1259, loss=0.6289448738098145\n", - "Surface training t=1260, loss=0.652921050786972\n", - "Surface training t=1261, loss=0.6444889008998871\n", - "Surface training t=1262, loss=0.6737412810325623\n", - "Surface training t=1263, loss=0.615460604429245\n", - "Surface training t=1264, loss=0.6458952128887177\n", - "Surface training t=1265, loss=0.6463958621025085\n", - 
"Surface training t=1266, loss=0.6767255961894989\n", - "Surface training t=1267, loss=0.6648136377334595\n", - "Surface training t=1268, loss=0.6399444341659546\n", - "Surface training t=1269, loss=0.6208926439285278\n", - "Surface training t=1270, loss=0.6512565910816193\n", - "Surface training t=1271, loss=0.6201372146606445\n", - "Surface training t=1272, loss=0.678394615650177\n", - "Surface training t=1273, loss=0.6767788529396057\n", - "Surface training t=1274, loss=0.655029684305191\n", - "Surface training t=1275, loss=0.6827405393123627\n", - "Surface training t=1276, loss=0.6225075423717499\n", - "Surface training t=1277, loss=0.7193161845207214\n", - "Surface training t=1278, loss=0.6508383452892303\n", - "Surface training t=1279, loss=0.6242869794368744\n", - "Surface training t=1280, loss=0.672045886516571\n", - "Surface training t=1281, loss=0.6954369843006134\n", - "Surface training t=1282, loss=0.6526513397693634\n", - "Surface training t=1283, loss=0.612136721611023\n", - "Surface training t=1284, loss=0.6354774832725525\n", - "Surface training t=1285, loss=0.6054664254188538\n", - "Surface training t=1286, loss=0.6406698524951935\n", - "Surface training t=1287, loss=0.6292072236537933\n", - "Surface training t=1288, loss=0.698925644159317\n", - "Surface training t=1289, loss=0.6244089901447296\n", - "Surface training t=1290, loss=0.6891805529594421\n", - "Surface training t=1291, loss=0.6551369428634644\n", - "Surface training t=1292, loss=0.7123220264911652\n", - "Surface training t=1293, loss=0.6881983280181885\n", - "Surface training t=1294, loss=0.6276598572731018\n", - "Surface training t=1295, loss=0.6488191485404968\n", - "Surface training t=1296, loss=0.6301617622375488\n", - "Surface training t=1297, loss=0.6713285148143768\n", - "Surface training t=1298, loss=0.6333914399147034\n", - "Surface training t=1299, loss=0.6676086187362671\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=1300, loss=0.6603861153125763\n", - "Surface training t=1301, loss=0.6215182542800903\n", - "Surface training t=1302, loss=0.6846041083335876\n", - "Surface training t=1303, loss=0.625732958316803\n", - "Surface training t=1304, loss=0.7239500284194946\n", - "Surface training t=1305, loss=0.6646137833595276\n", - "Surface training t=1306, loss=0.6074694097042084\n", - "Surface training t=1307, loss=0.6691764295101166\n", - "Surface training t=1308, loss=0.6534204483032227\n", - "Surface training t=1309, loss=0.6594311594963074\n", - "Surface training t=1310, loss=0.5915285497903824\n", - "Surface training t=1311, loss=0.6616460978984833\n", - "Surface training t=1312, loss=0.6456650197505951\n", - "Surface training t=1313, loss=0.685455858707428\n", - "Surface training t=1314, loss=0.6400328278541565\n", - "Surface training t=1315, loss=0.6523101031780243\n", - "Surface training t=1316, loss=0.6646439731121063\n", - "Surface training t=1317, loss=0.6451307237148285\n", - "Surface training t=1318, loss=0.6849786341190338\n", - "Surface training t=1319, loss=0.6363668143749237\n", - "Surface training t=1320, loss=0.6163534224033356\n", - "Surface training t=1321, loss=0.6460579633712769\n", - "Surface training t=1322, loss=0.6236027777194977\n", - "Surface training t=1323, loss=0.6420817077159882\n", - "Surface training t=1324, loss=0.628928154706955\n", - "Surface training t=1325, loss=0.6582735776901245\n", - "Surface training t=1326, loss=0.6142328977584839\n", - "Surface training t=1327, loss=0.6217162609100342\n", - "Surface training 
t=1328, loss=0.6182471513748169\n", - "Surface training t=1329, loss=0.6235172748565674\n", - "Surface training t=1330, loss=0.6533673107624054\n", - "Surface training t=1331, loss=0.6535628437995911\n", - "Surface training t=1332, loss=0.6801361739635468\n", - "Surface training t=1333, loss=0.6643293499946594\n", - "Surface training t=1334, loss=0.5989289283752441\n", - "Surface training t=1335, loss=0.6119298934936523\n", - "Surface training t=1336, loss=0.6293862462043762\n", - "Surface training t=1337, loss=0.6328511834144592\n", - "Surface training t=1338, loss=0.7099388539791107\n", - "Surface training t=1339, loss=0.602483481168747\n", - "Surface training t=1340, loss=0.6020293831825256\n", - "Surface training t=1341, loss=0.6365745961666107\n", - "Surface training t=1342, loss=0.6390696167945862\n", - "Surface training t=1343, loss=0.6771174669265747\n", - "Surface training t=1344, loss=0.6101353168487549\n", - "Surface training t=1345, loss=0.655613899230957\n", - "Surface training t=1346, loss=0.6117248833179474\n", - "Surface training t=1347, loss=0.6435916423797607\n", - "Surface training t=1348, loss=0.6514075398445129\n", - "Surface training t=1349, loss=0.6723738014698029\n", - "Surface training t=1350, loss=0.6762326955795288\n", - "Surface training t=1351, loss=0.6452055275440216\n", - "Surface training t=1352, loss=0.6740718483924866\n", - "Surface training t=1353, loss=0.6447959542274475\n", - "Surface training t=1354, loss=0.6261891424655914\n", - "Surface training t=1355, loss=0.6226188242435455\n", - "Surface training t=1356, loss=0.6467366516590118\n", - "Surface training t=1357, loss=0.614318460226059\n", - "Surface training t=1358, loss=0.6037726700305939\n", - "Surface training t=1359, loss=0.6718493103981018\n", - "Surface training t=1360, loss=0.6347018182277679\n", - "Surface training t=1361, loss=0.596694678068161\n", - "Surface training t=1362, loss=0.6583932638168335\n", - "Surface training t=1363, loss=0.6254995167255402\n", - "Surface training t=1364, loss=0.6395244002342224\n", - "Surface training t=1365, loss=0.6442410945892334\n", - "Surface training t=1366, loss=0.6209297180175781\n", - "Surface training t=1367, loss=0.5873103439807892\n", - "Surface training t=1368, loss=0.6717439889907837\n", - "Surface training t=1369, loss=0.5814811438322067\n", - "Surface training t=1370, loss=0.6347451210021973\n", - "Surface training t=1371, loss=0.5976554453372955\n", - "Surface training t=1372, loss=0.6001757085323334\n", - "Surface training t=1373, loss=0.6305055916309357\n", - "Surface training t=1374, loss=0.6660210490226746\n", - "Surface training t=1375, loss=0.6318836212158203\n", - "Surface training t=1376, loss=0.607777327299118\n", - "Surface training t=1377, loss=0.6415330767631531\n", - "Surface training t=1378, loss=0.6094745397567749\n", - "Surface training t=1379, loss=0.6414063274860382\n", - "Surface training t=1380, loss=0.6859756112098694\n", - "Surface training t=1381, loss=0.6196469962596893\n", - "Surface training t=1382, loss=0.6083597242832184\n", - "Surface training t=1383, loss=0.6437655687332153\n", - "Surface training t=1384, loss=0.6411689519882202\n", - "Surface training t=1385, loss=0.6179415285587311\n", - "Surface training t=1386, loss=0.6480084359645844\n", - "Surface training t=1387, loss=0.6324549615383148\n", - "Surface training t=1388, loss=0.6592339873313904\n", - "Surface training t=1389, loss=0.5950922667980194\n", - "Surface training t=1390, loss=0.6426184177398682\n", - "Surface training t=1391, 
loss=0.5942370593547821\n", - "Surface training t=1392, loss=0.6434265971183777\n", - "Surface training t=1393, loss=0.6577836275100708\n", - "Surface training t=1394, loss=0.6854157149791718\n", - "Surface training t=1395, loss=0.6253171563148499\n", - "Surface training t=1396, loss=0.6162525415420532\n", - "Surface training t=1397, loss=0.572499543428421\n", - "Surface training t=1398, loss=0.627261221408844\n", - "Surface training t=1399, loss=0.6211438179016113\n", - "Surface training t=1400, loss=0.6077205538749695\n", - "Surface training t=1401, loss=0.6506679356098175\n", - "Surface training t=1402, loss=0.6103848516941071\n", - "Surface training t=1403, loss=0.6838327646255493\n", - "Surface training t=1404, loss=0.5655101239681244\n", - "Surface training t=1405, loss=0.596840113401413\n", - "Surface training t=1406, loss=0.6313133239746094\n", - "Surface training t=1407, loss=0.6156461238861084\n", - "Surface training t=1408, loss=0.6337225139141083\n", - "Surface training t=1409, loss=0.6423831880092621\n", - "Surface training t=1410, loss=0.5881217420101166\n", - "Surface training t=1411, loss=0.6246346235275269\n", - "Surface training t=1412, loss=0.5770542621612549\n", - "Surface training t=1413, loss=0.6633304953575134\n", - "Surface training t=1414, loss=0.6344187259674072\n", - "Surface training t=1415, loss=0.598046064376831\n", - "Surface training t=1416, loss=0.6247725784778595\n", - "Surface training t=1417, loss=0.6133538782596588\n", - "Surface training t=1418, loss=0.6641361713409424\n", - "Surface training t=1419, loss=0.6069384813308716\n", - "Surface training t=1420, loss=0.6253416538238525\n", - "Surface training t=1421, loss=0.6260331869125366\n", - "Surface training t=1422, loss=0.653234988451004\n", - "Surface training t=1423, loss=0.5723119229078293\n", - "Surface training t=1424, loss=0.6084069907665253\n", - "Surface training t=1425, loss=0.5965034365653992\n", - "Surface training t=1426, loss=0.5929324328899384\n", - "Surface training t=1427, loss=0.5930534303188324\n", - "Surface training t=1428, loss=0.5899311900138855\n", - "Surface training t=1429, loss=0.6183781027793884\n", - "Surface training t=1430, loss=0.6129874885082245\n", - "Surface training t=1431, loss=0.6554121673107147\n", - "Surface training t=1432, loss=0.6485585272312164\n", - "Surface training t=1433, loss=0.6322175860404968\n", - "Surface training t=1434, loss=0.6629759073257446\n", - "Surface training t=1435, loss=0.6034106910228729\n", - "Surface training t=1436, loss=0.5904211401939392\n", - "Surface training t=1437, loss=0.6198680400848389\n", - "Surface training t=1438, loss=0.6011269092559814\n", - "Surface training t=1439, loss=0.6052214205265045\n", - "Surface training t=1440, loss=0.6363197565078735\n", - "Surface training t=1441, loss=0.622616320848465\n", - "Surface training t=1442, loss=0.6346160471439362\n", - "Surface training t=1443, loss=0.657717376947403\n", - "Surface training t=1444, loss=0.5867609083652496\n", - "Surface training t=1445, loss=0.577114075422287\n", - "Surface training t=1446, loss=0.616787314414978\n", - "Surface training t=1447, loss=0.6112343966960907\n", - "Surface training t=1448, loss=0.5894627273082733\n", - "Surface training t=1449, loss=0.5985011458396912\n", - "Surface training t=1450, loss=0.624166876077652\n", - "Surface training t=1451, loss=0.6214512884616852\n", - "Surface training t=1452, loss=0.6438569724559784\n", - "Surface training t=1453, loss=0.6844114661216736\n", - "Surface training t=1454, loss=0.6033623218536377\n", - 
"Surface training t=1455, loss=0.6059119701385498\n", - "Surface training t=1456, loss=0.6420363485813141\n", - "Surface training t=1457, loss=0.6185729503631592\n", - "Surface training t=1458, loss=0.5946282744407654\n", - "Surface training t=1459, loss=0.6950341165065765\n", - "Surface training t=1460, loss=0.5916966795921326\n", - "Surface training t=1461, loss=0.5611128211021423\n", - "Surface training t=1462, loss=0.5852953493595123\n", - "Surface training t=1463, loss=0.6101351380348206\n", - "Surface training t=1464, loss=0.6130150854587555\n", - "Surface training t=1465, loss=0.5958472192287445\n", - "Surface training t=1466, loss=0.6162252426147461\n", - "Surface training t=1467, loss=0.628896951675415\n", - "Surface training t=1468, loss=0.6270131170749664\n", - "Surface training t=1469, loss=0.5816408693790436\n", - "Surface training t=1470, loss=0.5685837268829346\n", - "Surface training t=1471, loss=0.5458214432001114\n", - "Surface training t=1472, loss=0.6116551160812378\n", - "Surface training t=1473, loss=0.6085171699523926\n", - "Surface training t=1474, loss=0.5904819071292877\n", - "Surface training t=1475, loss=0.5873957872390747\n", - "Surface training t=1476, loss=0.6647518873214722\n", - "Surface training t=1477, loss=0.6355721056461334\n", - "Surface training t=1478, loss=0.6084147393703461\n", - "Surface training t=1479, loss=0.546796441078186\n", - "Surface training t=1480, loss=0.5876783132553101\n", - "Surface training t=1481, loss=0.61070317029953\n", - "Surface training t=1482, loss=0.5962350070476532\n", - "Surface training t=1483, loss=0.5637257397174835\n", - "Surface training t=1484, loss=0.6143780946731567\n", - "Surface training t=1485, loss=0.6349098980426788\n", - "Surface training t=1486, loss=0.5783853530883789\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=1487, loss=0.5689104199409485\n", - "Surface training t=1488, loss=0.5585772395133972\n", - "Surface training t=1489, loss=0.5979010462760925\n", - "Surface training t=1490, loss=0.6223300993442535\n", - "Surface training t=1491, loss=0.6123720705509186\n", - "Surface training t=1492, loss=0.5621511340141296\n", - "Surface training t=1493, loss=0.6119587123394012\n", - "Surface training t=1494, loss=0.6314526796340942\n", - "Surface training t=1495, loss=0.5961701273918152\n", - "Surface training t=1496, loss=0.6131268441677094\n", - "Surface training t=1497, loss=0.578490674495697\n", - "Surface training t=1498, loss=0.6054146587848663\n", - "Surface training t=1499, loss=0.5906876027584076\n", - "Surface training t=1500, loss=0.6114715337753296\n", - "Surface training t=1501, loss=0.5967662036418915\n", - "Surface training t=1502, loss=0.6745475828647614\n", - "Surface training t=1503, loss=0.6350761651992798\n", - "Surface training t=1504, loss=0.5549453496932983\n", - "Surface training t=1505, loss=0.5673704743385315\n", - "Surface training t=1506, loss=0.6004814207553864\n", - "Surface training t=1507, loss=0.5915707051753998\n", - "Surface training t=1508, loss=0.608356386423111\n", - "Surface training t=1509, loss=0.6199952661991119\n", - "Surface training t=1510, loss=0.5684323310852051\n", - "Surface training t=1511, loss=0.5961944162845612\n", - "Surface training t=1512, loss=0.6223937571048737\n", - "Surface training t=1513, loss=0.5993738174438477\n", - "Surface training t=1514, loss=0.5736366510391235\n", - "Surface training t=1515, loss=0.5856226980686188\n", - "Surface training t=1516, loss=0.6183093190193176\n", - "Surface training 
t=1517, loss=0.5880855917930603\n", - "Surface training t=1518, loss=0.6329732537269592\n", - "Surface training t=1519, loss=0.5820735692977905\n", - "Surface training t=1520, loss=0.544177457690239\n", - "Surface training t=1521, loss=0.5710714757442474\n", - "Surface training t=1522, loss=0.6082746982574463\n", - "Surface training t=1523, loss=0.5667178332805634\n", - "Surface training t=1524, loss=0.5881072282791138\n", - "Surface training t=1525, loss=0.5782542526721954\n", - "Surface training t=1526, loss=0.5898803770542145\n", - "Surface training t=1527, loss=0.6108886897563934\n", - "Surface training t=1528, loss=0.6118485927581787\n", - "Surface training t=1529, loss=0.5505621135234833\n", - "Surface training t=1530, loss=0.598789632320404\n", - "Surface training t=1531, loss=0.5885669589042664\n", - "Surface training t=1532, loss=0.5888790190219879\n", - "Surface training t=1533, loss=0.6387850046157837\n", - "Surface training t=1534, loss=0.619098573923111\n", - "Surface training t=1535, loss=0.5651270151138306\n", - "Surface training t=1536, loss=0.60708087682724\n", - "Surface training t=1537, loss=0.5850948989391327\n", - "Surface training t=1538, loss=0.5895655155181885\n", - "Surface training t=1539, loss=0.5768668055534363\n", - "Surface training t=1540, loss=0.5997496247291565\n", - "Surface training t=1541, loss=0.5883563756942749\n", - "Surface training t=1542, loss=0.6220763623714447\n", - "Surface training t=1543, loss=0.5751241743564606\n", - "Surface training t=1544, loss=0.655835747718811\n", - "Surface training t=1545, loss=0.5634807348251343\n", - "Surface training t=1546, loss=0.5576308220624924\n", - "Surface training t=1547, loss=0.5597370117902756\n", - "Surface training t=1548, loss=0.5483444333076477\n", - "Surface training t=1549, loss=0.5703310966491699\n", - "Surface training t=1550, loss=0.6276520788669586\n", - "Surface training t=1551, loss=0.6052182614803314\n", - "Surface training t=1552, loss=0.5719888806343079\n", - "Surface training t=1553, loss=0.5923294425010681\n", - "Surface training t=1554, loss=0.6032650470733643\n", - "Surface training t=1555, loss=0.579056441783905\n", - "Surface training t=1556, loss=0.5812872052192688\n", - "Surface training t=1557, loss=0.5793512463569641\n", - "Surface training t=1558, loss=0.6037356555461884\n", - "Surface training t=1559, loss=0.6198834776878357\n", - "Surface training t=1560, loss=0.5619344413280487\n", - "Surface training t=1561, loss=0.5775902271270752\n", - "Surface training t=1562, loss=0.5769711136817932\n", - "Surface training t=1563, loss=0.5989309847354889\n", - "Surface training t=1564, loss=0.5790144205093384\n", - "Surface training t=1565, loss=0.5268832445144653\n", - "Surface training t=1566, loss=0.610869824886322\n", - "Surface training t=1567, loss=0.5688000321388245\n", - "Surface training t=1568, loss=0.5731547176837921\n", - "Surface training t=1569, loss=0.5578331649303436\n", - "Surface training t=1570, loss=0.5773864090442657\n", - "Surface training t=1571, loss=0.576775848865509\n", - "Surface training t=1572, loss=0.5731522440910339\n", - "Surface training t=1573, loss=0.5756945013999939\n", - "Surface training t=1574, loss=0.5476462990045547\n", - "Surface training t=1575, loss=0.5932696759700775\n", - "Surface training t=1576, loss=0.5455142259597778\n", - "Surface training t=1577, loss=0.5557557642459869\n", - "Surface training t=1578, loss=0.5803872644901276\n", - "Surface training t=1579, loss=0.5937509834766388\n", - "Surface training t=1580, loss=0.5979250371456146\n", 
- "Surface training t=1581, loss=0.5995366275310516\n", - "Surface training t=1582, loss=0.5792238116264343\n", - "Surface training t=1583, loss=0.5761803984642029\n", - "Surface training t=1584, loss=0.5918707251548767\n", - "Surface training t=1585, loss=0.6419258415699005\n", - "Surface training t=1586, loss=0.5863294303417206\n", - "Surface training t=1587, loss=0.5662004053592682\n", - "Surface training t=1588, loss=0.5590190589427948\n", - "Surface training t=1589, loss=0.5907467305660248\n", - "Surface training t=1590, loss=0.5873545706272125\n", - "Surface training t=1591, loss=0.5760540962219238\n", - "Surface training t=1592, loss=0.5538704693317413\n", - "Surface training t=1593, loss=0.5499265491962433\n", - "Surface training t=1594, loss=0.6317692995071411\n", - "Surface training t=1595, loss=0.5665058195590973\n", - "Surface training t=1596, loss=0.5671867430210114\n", - "Surface training t=1597, loss=0.6231180429458618\n", - "Surface training t=1598, loss=0.5569963753223419\n", - "Surface training t=1599, loss=0.5227815806865692\n", - "Surface training t=1600, loss=0.6468845009803772\n", - "Surface training t=1601, loss=0.5849844515323639\n", - "Surface training t=1602, loss=0.5686385631561279\n", - "Surface training t=1603, loss=0.5922043919563293\n", - "Surface training t=1604, loss=0.5624347627162933\n", - "Surface training t=1605, loss=0.5497264266014099\n", - "Surface training t=1606, loss=0.6131913363933563\n", - "Surface training t=1607, loss=0.5007703304290771\n", - "Surface training t=1608, loss=0.5791040062904358\n", - "Surface training t=1609, loss=0.5778333246707916\n", - "Surface training t=1610, loss=0.5227562785148621\n", - "Surface training t=1611, loss=0.5908627212047577\n", - "Surface training t=1612, loss=0.5265398025512695\n", - "Surface training t=1613, loss=0.566598504781723\n", - "Surface training t=1614, loss=0.5701106786727905\n", - "Surface training t=1615, loss=0.5481728315353394\n", - "Surface training t=1616, loss=0.5555581450462341\n", - "Surface training t=1617, loss=0.5702228844165802\n", - "Surface training t=1618, loss=0.5744666457176208\n", - "Surface training t=1619, loss=0.548890084028244\n", - "Surface training t=1620, loss=0.5406183302402496\n", - "Surface training t=1621, loss=0.5993082523345947\n", - "Surface training t=1622, loss=0.5408166348934174\n", - "Surface training t=1623, loss=0.5521177649497986\n", - "Surface training t=1624, loss=0.5865168571472168\n", - "Surface training t=1625, loss=0.6146858632564545\n", - "Surface training t=1626, loss=0.5431950688362122\n", - "Surface training t=1627, loss=0.5166088342666626\n", - "Surface training t=1628, loss=0.531822144985199\n", - "Surface training t=1629, loss=0.553172379732132\n", - "Surface training t=1630, loss=0.5819437801837921\n", - "Surface training t=1631, loss=0.5568383932113647\n", - "Surface training t=1632, loss=0.5385760515928268\n", - "Surface training t=1633, loss=0.5707316696643829\n", - "Surface training t=1634, loss=0.5556356608867645\n", - "Surface training t=1635, loss=0.5515784621238708\n", - "Surface training t=1636, loss=0.5613628625869751\n", - "Surface training t=1637, loss=0.5125326812267303\n", - "Surface training t=1638, loss=0.5913249850273132\n", - "Surface training t=1639, loss=0.5344155430793762\n", - "Surface training t=1640, loss=0.5921924412250519\n", - "Surface training t=1641, loss=0.6158732175827026\n", - "Surface training t=1642, loss=0.5855247378349304\n", - "Surface training t=1643, loss=0.5795102417469025\n", - "Surface training t=1644, 
loss=0.5545766055583954\n", - "Surface training t=1645, loss=0.6049953103065491\n", - "Surface training t=1646, loss=0.5543981492519379\n", - "Surface training t=1647, loss=0.5723985433578491\n", - "Surface training t=1648, loss=0.5484710037708282\n", - "Surface training t=1649, loss=0.5606903731822968\n", - "Surface training t=1650, loss=0.5816543400287628\n", - "Surface training t=1651, loss=0.5612285733222961\n", - "Surface training t=1652, loss=0.5348918437957764\n", - "Surface training t=1653, loss=0.6074813604354858\n", - "Surface training t=1654, loss=0.566905677318573\n", - "Surface training t=1655, loss=0.5626762509346008\n", - "Surface training t=1656, loss=0.5642643868923187\n", - "Surface training t=1657, loss=0.5802262723445892\n", - "Surface training t=1658, loss=0.5444507002830505\n", - "Surface training t=1659, loss=0.5343849062919617\n", - "Surface training t=1660, loss=0.573631078004837\n", - "Surface training t=1661, loss=0.5094979703426361\n", - "Surface training t=1662, loss=0.5825095176696777\n", - "Surface training t=1663, loss=0.602462649345398\n", - "Surface training t=1664, loss=0.576489120721817\n", - "Surface training t=1665, loss=0.5887234807014465\n", - "Surface training t=1666, loss=0.5777486562728882\n", - "Surface training t=1667, loss=0.521567314863205\n", - "Surface training t=1668, loss=0.5717287063598633\n", - "Surface training t=1669, loss=0.5248273015022278\n", - "Surface training t=1670, loss=0.5803937911987305\n", - "Surface training t=1671, loss=0.5550538897514343\n", - "Surface training t=1672, loss=0.6025519371032715\n", - "Surface training t=1673, loss=0.5425985157489777\n", - "Surface training t=1674, loss=0.5517261922359467\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=1675, loss=0.546603798866272\n", - "Surface training t=1676, loss=0.5410367250442505\n", - "Surface training t=1677, loss=0.580089271068573\n", - "Surface training t=1678, loss=0.5583784282207489\n", - "Surface training t=1679, loss=0.5700943171977997\n", - "Surface training t=1680, loss=0.5673936307430267\n", - "Surface training t=1681, loss=0.5926579535007477\n", - "Surface training t=1682, loss=0.5694858133792877\n", - "Surface training t=1683, loss=0.5569345951080322\n", - "Surface training t=1684, loss=0.5718838572502136\n", - "Surface training t=1685, loss=0.5598309934139252\n", - "Surface training t=1686, loss=0.5310629606246948\n", - "Surface training t=1687, loss=0.5134412497282028\n", - "Surface training t=1688, loss=0.5408430099487305\n", - "Surface training t=1689, loss=0.4971855580806732\n", - "Surface training t=1690, loss=0.5153980553150177\n", - "Surface training t=1691, loss=0.5599159002304077\n", - "Surface training t=1692, loss=0.5497987568378448\n", - "Surface training t=1693, loss=0.5258941054344177\n", - "Surface training t=1694, loss=0.5803600549697876\n", - "Surface training t=1695, loss=0.5122186839580536\n", - "Surface training t=1696, loss=0.5919989347457886\n", - "Surface training t=1697, loss=0.5478089451789856\n", - "Surface training t=1698, loss=0.500688448548317\n", - "Surface training t=1699, loss=0.5270397067070007\n", - "Surface training t=1700, loss=0.5407401025295258\n", - "Surface training t=1701, loss=0.5286331176757812\n", - "Surface training t=1702, loss=0.5270324051380157\n", - "Surface training t=1703, loss=0.540240615606308\n", - "Surface training t=1704, loss=0.5780258774757385\n", - "Surface training t=1705, loss=0.5481935739517212\n", - "Surface training t=1706, 
loss=0.5930746793746948\n", - "Surface training t=1707, loss=0.5528964698314667\n", - "Surface training t=1708, loss=0.5215512812137604\n", - "Surface training t=1709, loss=0.5258941948413849\n", - "Surface training t=1710, loss=0.5513822436332703\n", - "Surface training t=1711, loss=0.5144731104373932\n", - "Surface training t=1712, loss=0.5353690981864929\n", - "Surface training t=1713, loss=0.5732090175151825\n", - "Surface training t=1714, loss=0.5169036090373993\n", - "Surface training t=1715, loss=0.5816029906272888\n", - "Surface training t=1716, loss=0.5428482592105865\n", - "Surface training t=1717, loss=0.5601636171340942\n", - "Surface training t=1718, loss=0.5389315783977509\n", - "Surface training t=1719, loss=0.5381743013858795\n", - "Surface training t=1720, loss=0.5679199695587158\n", - "Surface training t=1721, loss=0.569322019815445\n", - "Surface training t=1722, loss=0.5581793487071991\n", - "Surface training t=1723, loss=0.5603066980838776\n", - "Surface training t=1724, loss=0.5426735877990723\n", - "Surface training t=1725, loss=0.5131924003362656\n", - "Surface training t=1726, loss=0.5369418859481812\n", - "Surface training t=1727, loss=0.45093728601932526\n", - "Surface training t=1728, loss=0.569396048784256\n", - "Surface training t=1729, loss=0.5294174551963806\n", - "Surface training t=1730, loss=0.5470679402351379\n", - "Surface training t=1731, loss=0.540203720331192\n", - "Surface training t=1732, loss=0.5195617079734802\n", - "Surface training t=1733, loss=0.5153538286685944\n", - "Surface training t=1734, loss=0.5187582075595856\n", - "Surface training t=1735, loss=0.5676852166652679\n", - "Surface training t=1736, loss=0.522682398557663\n", - "Surface training t=1737, loss=0.5287748277187347\n", - "Surface training t=1738, loss=0.537882536649704\n", - "Surface training t=1739, loss=0.5454129576683044\n", - "Surface training t=1740, loss=0.5684202015399933\n", - "Surface training t=1741, loss=0.5526989698410034\n", - "Surface training t=1742, loss=0.5495752394199371\n", - "Surface training t=1743, loss=0.511784240603447\n", - "Surface training t=1744, loss=0.5319581627845764\n", - "Surface training t=1745, loss=0.5332968831062317\n", - "Surface training t=1746, loss=0.6103714406490326\n", - "Surface training t=1747, loss=0.5597752630710602\n", - "Surface training t=1748, loss=0.561674028635025\n", - "Surface training t=1749, loss=0.539885938167572\n", - "Surface training t=1750, loss=0.5090260803699493\n", - "Surface training t=1751, loss=0.5710765719413757\n", - "Surface training t=1752, loss=0.4914882034063339\n", - "Surface training t=1753, loss=0.5842322111129761\n", - "Surface training t=1754, loss=0.5215686559677124\n", - "Surface training t=1755, loss=0.5189706087112427\n", - "Surface training t=1756, loss=0.5071735382080078\n", - "Surface training t=1757, loss=0.5631357729434967\n", - "Surface training t=1758, loss=0.5523510277271271\n", - "Surface training t=1759, loss=0.5009432733058929\n", - "Surface training t=1760, loss=0.5101053714752197\n", - "Surface training t=1761, loss=0.5172011852264404\n", - "Surface training t=1762, loss=0.5104911923408508\n", - "Surface training t=1763, loss=0.5118454992771149\n", - "Surface training t=1764, loss=0.5224879384040833\n", - "Surface training t=1765, loss=0.5022819936275482\n", - "Surface training t=1766, loss=0.5072099566459656\n", - "Surface training t=1767, loss=0.5502354800701141\n", - "Surface training t=1768, loss=0.49551019072532654\n", - "Surface training t=1769, loss=0.5721024870872498\n", - 
"Surface training t=1770, loss=0.5015315264463425\n", - "Surface training t=1771, loss=0.5170785784721375\n", - "Surface training t=1772, loss=0.5093269348144531\n", - "Surface training t=1773, loss=0.5095750689506531\n", - "Surface training t=1774, loss=0.5443229675292969\n", - "Surface training t=1775, loss=0.48660290241241455\n", - "Surface training t=1776, loss=0.5727396607398987\n", - "Surface training t=1777, loss=0.5272437930107117\n", - "Surface training t=1778, loss=0.511463463306427\n", - "Surface training t=1779, loss=0.472331702709198\n", - "Surface training t=1780, loss=0.5293459594249725\n", - "Surface training t=1781, loss=0.512712836265564\n", - "Surface training t=1782, loss=0.5513966679573059\n", - "Surface training t=1783, loss=0.5383711606264114\n", - "Surface training t=1784, loss=0.5117442607879639\n", - "Surface training t=1785, loss=0.509988397359848\n", - "Surface training t=1786, loss=0.5553814023733139\n", - "Surface training t=1787, loss=0.521844893693924\n", - "Surface training t=1788, loss=0.5155487060546875\n", - "Surface training t=1789, loss=0.5445486903190613\n", - "Surface training t=1790, loss=0.5489479303359985\n", - "Surface training t=1791, loss=0.5460342466831207\n", - "Surface training t=1792, loss=0.5303633809089661\n", - "Surface training t=1793, loss=0.5143677592277527\n", - "Surface training t=1794, loss=0.49580827355384827\n", - "Surface training t=1795, loss=0.5132316052913666\n", - "Surface training t=1796, loss=0.5235854983329773\n", - "Surface training t=1797, loss=0.4976385533809662\n", - "Surface training t=1798, loss=0.5334295332431793\n", - "Surface training t=1799, loss=0.5344972759485245\n", - "Surface training t=1800, loss=0.478397861123085\n", - "Surface training t=1801, loss=0.5057857930660248\n", - "Surface training t=1802, loss=0.5322512090206146\n", - "Surface training t=1803, loss=0.4983079135417938\n", - "Surface training t=1804, loss=0.5171728730201721\n", - "Surface training t=1805, loss=0.5287827253341675\n", - "Surface training t=1806, loss=0.48571400344371796\n", - "Surface training t=1807, loss=0.5144541263580322\n", - "Surface training t=1808, loss=0.48613524436950684\n", - "Surface training t=1809, loss=0.48911258578300476\n", - "Surface training t=1810, loss=0.5242182314395905\n", - "Surface training t=1811, loss=0.5421418100595474\n", - "Surface training t=1812, loss=0.48947155475616455\n", - "Surface training t=1813, loss=0.582664430141449\n", - "Surface training t=1814, loss=0.521762490272522\n", - "Surface training t=1815, loss=0.47981032729148865\n", - "Surface training t=1816, loss=0.5375482439994812\n", - "Surface training t=1817, loss=0.5118400156497955\n", - "Surface training t=1818, loss=0.531592071056366\n", - "Surface training t=1819, loss=0.49534980952739716\n", - "Surface training t=1820, loss=0.49712875485420227\n", - "Surface training t=1821, loss=0.5416240692138672\n", - "Surface training t=1822, loss=0.5325545370578766\n", - "Surface training t=1823, loss=0.5052334666252136\n", - "Surface training t=1824, loss=0.4777977019548416\n", - "Surface training t=1825, loss=0.5034140348434448\n", - "Surface training t=1826, loss=0.4904908686876297\n", - "Surface training t=1827, loss=0.46310292184352875\n", - "Surface training t=1828, loss=0.5083224326372147\n", - "Surface training t=1829, loss=0.5009748786687851\n", - "Surface training t=1830, loss=0.5442925691604614\n", - "Surface training t=1831, loss=0.5407524406909943\n", - "Surface training t=1832, loss=0.5096377730369568\n", - "Surface training t=1833, 
loss=0.4879950135946274\n", - "Surface training t=1834, loss=0.5058115124702454\n", - "Surface training t=1835, loss=0.5104922205209732\n", - "Surface training t=1836, loss=0.48677369952201843\n", - "Surface training t=1837, loss=0.5337058305740356\n", - "Surface training t=1838, loss=0.5114421546459198\n", - "Surface training t=1839, loss=0.5288587808609009\n", - "Surface training t=1840, loss=0.505891740322113\n", - "Surface training t=1841, loss=0.5081346929073334\n", - "Surface training t=1842, loss=0.5119224786758423\n", - "Surface training t=1843, loss=0.5111746490001678\n", - "Surface training t=1844, loss=0.5089800953865051\n", - "Surface training t=1845, loss=0.473514199256897\n", - "Surface training t=1846, loss=0.49266883730888367\n", - "Surface training t=1847, loss=0.4835021197795868\n", - "Surface training t=1848, loss=0.46750161051750183\n", - "Surface training t=1849, loss=0.47433990240097046\n", - "Surface training t=1850, loss=0.5494632422924042\n", - "Surface training t=1851, loss=0.4889235496520996\n", - "Surface training t=1852, loss=0.49862371385097504\n", - "Surface training t=1853, loss=0.512090265750885\n", - "Surface training t=1854, loss=0.4781784266233444\n", - "Surface training t=1855, loss=0.4959116578102112\n", - "Surface training t=1856, loss=0.5122604072093964\n", - "Surface training t=1857, loss=0.5763310194015503\n", - "Surface training t=1858, loss=0.508299931883812\n", - "Surface training t=1859, loss=0.5515112280845642\n", - "Surface training t=1860, loss=0.5287187695503235\n", - "Surface training t=1861, loss=0.5671471655368805\n", - "Surface training t=1862, loss=0.4985746294260025\n", - "Surface training t=1863, loss=0.5088127553462982\n", - "Surface training t=1864, loss=0.5117132812738419\n", - "Surface training t=1865, loss=0.4671809524297714\n", - "Surface training t=1866, loss=0.48380447924137115\n", - "Surface training t=1867, loss=0.5200865268707275\n", - "Surface training t=1868, loss=0.5096611082553864\n", - "Surface training t=1869, loss=0.5074110925197601\n", - "Surface training t=1870, loss=0.46659351885318756\n", - "Surface training t=1871, loss=0.5422630608081818\n", - "Surface training t=1872, loss=0.4875025004148483\n", - "Surface training t=1873, loss=0.532322883605957\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=1874, loss=0.5379021018743515\n", - "Surface training t=1875, loss=0.4973258078098297\n", - "Surface training t=1876, loss=0.5177575051784515\n", - "Surface training t=1877, loss=0.531436949968338\n", - "Surface training t=1878, loss=0.49741213023662567\n", - "Surface training t=1879, loss=0.5623323917388916\n", - "Surface training t=1880, loss=0.5434858798980713\n", - "Surface training t=1881, loss=0.49501122534275055\n", - "Surface training t=1882, loss=0.4742158502340317\n", - "Surface training t=1883, loss=0.5006833970546722\n", - "Surface training t=1884, loss=0.4963088482618332\n", - "Surface training t=1885, loss=0.49148157238960266\n", - "Surface training t=1886, loss=0.49665313959121704\n", - "Surface training t=1887, loss=0.47965218126773834\n", - "Surface training t=1888, loss=0.4791076332330704\n", - "Surface training t=1889, loss=0.47329235076904297\n", - "Surface training t=1890, loss=0.47605228424072266\n", - "Surface training t=1891, loss=0.5830558985471725\n", - "Surface training t=1892, loss=0.49355660378932953\n", - "Surface training t=1893, loss=0.4998473972082138\n", - "Surface training t=1894, loss=0.47362636029720306\n", - "Surface training t=1895, 
loss=0.5266425907611847\n", - "Surface training t=1896, loss=0.504825085401535\n", - "Surface training t=1897, loss=0.5256170630455017\n", - "Surface training t=1898, loss=0.48414507508277893\n", - "Surface training t=1899, loss=0.5083892643451691\n", - "Surface training t=1900, loss=0.49895232915878296\n", - "Surface training t=1901, loss=0.5060631185770035\n", - "Surface training t=1902, loss=0.45790041983127594\n", - "Surface training t=1903, loss=0.46450942754745483\n", - "Surface training t=1904, loss=0.5000090897083282\n", - "Surface training t=1905, loss=0.4911537170410156\n", - "Surface training t=1906, loss=0.4844715744256973\n", - "Surface training t=1907, loss=0.5131893306970596\n", - "Surface training t=1908, loss=0.47566697001457214\n", - "Surface training t=1909, loss=0.5054825842380524\n", - "Surface training t=1910, loss=0.48316170275211334\n", - "Surface training t=1911, loss=0.4266125559806824\n", - "Surface training t=1912, loss=0.479911744594574\n", - "Surface training t=1913, loss=0.4437848925590515\n", - "Surface training t=1914, loss=0.506125271320343\n", - "Surface training t=1915, loss=0.4516299217939377\n", - "Surface training t=1916, loss=0.48587949573993683\n", - "Surface training t=1917, loss=0.45795200765132904\n", - "Surface training t=1918, loss=0.5042965412139893\n", - "Surface training t=1919, loss=0.5513158291578293\n", - "Surface training t=1920, loss=0.5321182459592819\n", - "Surface training t=1921, loss=0.42460374534130096\n", - "Surface training t=1922, loss=0.5049746185541153\n", - "Surface training t=1923, loss=0.47365419566631317\n", - "Surface training t=1924, loss=0.4792071580886841\n", - "Surface training t=1925, loss=0.5086800903081894\n", - "Surface training t=1926, loss=0.4743753522634506\n", - "Surface training t=1927, loss=0.5255634188652039\n", - "Surface training t=1928, loss=0.4891660213470459\n", - "Surface training t=1929, loss=0.4503292441368103\n", - "Surface training t=1930, loss=0.5045455247163773\n", - "Surface training t=1931, loss=0.4554952085018158\n", - "Surface training t=1932, loss=0.5123895555734634\n", - "Surface training t=1933, loss=0.4520217031240463\n", - "Surface training t=1934, loss=0.49207885563373566\n", - "Surface training t=1935, loss=0.4829970598220825\n", - "Surface training t=1936, loss=0.49479296803474426\n", - "Surface training t=1937, loss=0.48372839391231537\n", - "Surface training t=1938, loss=0.5077699571847916\n", - "Surface training t=1939, loss=0.5194637179374695\n", - "Surface training t=1940, loss=0.5039614737033844\n", - "Surface training t=1941, loss=0.48030439019203186\n", - "Surface training t=1942, loss=0.487669438123703\n", - "Surface training t=1943, loss=0.46794140338897705\n", - "Surface training t=1944, loss=0.5134047716856003\n", - "Surface training t=1945, loss=0.5072907507419586\n", - "Surface training t=1946, loss=0.5034403502941132\n", - "Surface training t=1947, loss=0.512673944234848\n", - "Surface training t=1948, loss=0.47749263048171997\n", - "Surface training t=1949, loss=0.4709629714488983\n", - "Surface training t=1950, loss=0.5111448019742966\n", - "Surface training t=1951, loss=0.4514176696538925\n", - "Surface training t=1952, loss=0.4781041145324707\n", - "Surface training t=1953, loss=0.50590880215168\n", - "Surface training t=1954, loss=0.534041628241539\n", - "Surface training t=1955, loss=0.4838692843914032\n", - "Surface training t=1956, loss=0.5268835872411728\n", - "Surface training t=1957, loss=0.4747440814971924\n", - "Surface training t=1958, 
loss=0.5568418353796005\n", - "Surface training t=1959, loss=0.5049805641174316\n", - "Surface training t=1960, loss=0.4372447431087494\n", - "Surface training t=1961, loss=0.5042705833911896\n", - "Surface training t=1962, loss=0.4908369183540344\n", - "Surface training t=1963, loss=0.5015744864940643\n", - "Surface training t=1964, loss=0.49862608313560486\n", - "Surface training t=1965, loss=0.4489113986492157\n", - "Surface training t=1966, loss=0.491102010011673\n", - "Surface training t=1967, loss=0.5022270977497101\n", - "Surface training t=1968, loss=0.4958924353122711\n", - "Surface training t=1969, loss=0.5130857229232788\n", - "Surface training t=1970, loss=0.4236487150192261\n", - "Surface training t=1971, loss=0.4196661412715912\n", - "Surface training t=1972, loss=0.46100619435310364\n", - "Surface training t=1973, loss=0.5095604509115219\n", - "Surface training t=1974, loss=0.5612853467464447\n", - "Surface training t=1975, loss=0.49362534284591675\n", - "Surface training t=1976, loss=0.4841706156730652\n", - "Surface training t=1977, loss=0.45835816860198975\n", - "Surface training t=1978, loss=0.4425622671842575\n", - "Surface training t=1979, loss=0.48147398233413696\n", - "Surface training t=1980, loss=0.5137482583522797\n", - "Surface training t=1981, loss=0.4834415167570114\n", - "Surface training t=1982, loss=0.48517800867557526\n", - "Surface training t=1983, loss=0.5397340655326843\n", - "Surface training t=1984, loss=0.45301999151706696\n", - "Surface training t=1985, loss=0.4907718598842621\n", - "Surface training t=1986, loss=0.4946315586566925\n", - "Surface training t=1987, loss=0.49535781145095825\n", - "Surface training t=1988, loss=0.5002654939889908\n", - "Surface training t=1989, loss=0.5559131950139999\n", - "Surface training t=1990, loss=0.4919121712446213\n", - "Surface training t=1991, loss=0.4721379280090332\n", - "Surface training t=1992, loss=0.481987327337265\n", - "Surface training t=1993, loss=0.5610778480768204\n", - "Surface training t=1994, loss=0.5179659128189087\n", - "Surface training t=1995, loss=0.5131275206804276\n", - "Surface training t=1996, loss=0.49628494679927826\n", - "Surface training t=1997, loss=0.47715407609939575\n", - "Surface training t=1998, loss=0.4898977130651474\n", - "Surface training t=1999, loss=0.4905552566051483\n", - "Surface training t=2000, loss=0.4167484939098358\n", - "Surface training t=2001, loss=0.5216405540704727\n", - "Surface training t=2002, loss=0.46292443573474884\n", - "Surface training t=2003, loss=0.47390642762184143\n", - "Surface training t=2004, loss=0.5046434551477432\n", - "Surface training t=2005, loss=0.5133406817913055\n", - "Surface training t=2006, loss=0.4871605187654495\n", - "Surface training t=2007, loss=0.4770279675722122\n", - "Surface training t=2008, loss=0.5059212148189545\n", - "Surface training t=2009, loss=0.5222014784812927\n", - "Surface training t=2010, loss=0.44510985910892487\n", - "Surface training t=2011, loss=0.44832535088062286\n", - "Surface training t=2012, loss=0.5115835815668106\n", - "Surface training t=2013, loss=0.4741647094488144\n", - "Surface training t=2014, loss=0.4352775663137436\n", - "Surface training t=2015, loss=0.5015784054994583\n", - "Surface training t=2016, loss=0.4830327332019806\n", - "Surface training t=2017, loss=0.4974598288536072\n", - "Surface training t=2018, loss=0.5236848443746567\n", - "Surface training t=2019, loss=0.4920715391635895\n", - "Surface training t=2020, loss=0.5202351063489914\n", - "Surface training t=2021, 
loss=0.4857856035232544\n", - "Surface training t=2022, loss=0.48419299721717834\n", - "Surface training t=2023, loss=0.4453105330467224\n", - "Surface training t=2024, loss=0.43877220153808594\n", - "Surface training t=2025, loss=0.43351227045059204\n", - "Surface training t=2026, loss=0.4895244985818863\n", - "Surface training t=2027, loss=0.5026067644357681\n", - "Surface training t=2028, loss=0.5207934230566025\n", - "Surface training t=2029, loss=0.5216234028339386\n", - "Surface training t=2030, loss=0.44078975915908813\n", - "Surface training t=2031, loss=0.47101935744285583\n", - "Surface training t=2032, loss=0.45899148285388947\n", - "Surface training t=2033, loss=0.47256051003932953\n", - "Surface training t=2034, loss=0.4730498343706131\n", - "Surface training t=2035, loss=0.43821966648101807\n", - "Surface training t=2036, loss=0.45331643521785736\n", - "Surface training t=2037, loss=0.43782754242420197\n", - "Surface training t=2038, loss=0.4712892472743988\n", - "Surface training t=2039, loss=0.4959627091884613\n", - "Surface training t=2040, loss=0.5312819629907608\n", - "Surface training t=2041, loss=0.4623580276966095\n", - "Surface training t=2042, loss=0.49866442382335663\n", - "Surface training t=2043, loss=0.47704721987247467\n", - "Surface training t=2044, loss=0.4562046080827713\n", - "Surface training t=2045, loss=0.47056879103183746\n", - "Surface training t=2046, loss=0.4770067483186722\n", - "Surface training t=2047, loss=0.45990970730781555\n", - "Surface training t=2048, loss=0.4728747010231018\n", - "Surface training t=2049, loss=0.4792155474424362\n", - "Surface training t=2050, loss=0.49826017022132874\n", - "Surface training t=2051, loss=0.4300519973039627\n", - "Surface training t=2052, loss=0.46831902861595154\n", - "Surface training t=2053, loss=0.47275614738464355\n", - "Surface training t=2054, loss=0.4997975528240204\n", - "Surface training t=2055, loss=0.4238484799861908\n", - "Surface training t=2056, loss=0.49787911772727966\n", - "Surface training t=2057, loss=0.4646986573934555\n", - "Surface training t=2058, loss=0.4898216128349304\n", - "Surface training t=2059, loss=0.46394023299217224\n", - "Surface training t=2060, loss=0.49670903384685516\n", - "Surface training t=2061, loss=0.45136111974716187\n", - "Surface training t=2062, loss=0.43717731535434723\n", - "Surface training t=2063, loss=0.42503686249256134\n", - "Surface training t=2064, loss=0.5191025733947754\n", - "Surface training t=2065, loss=0.47303304076194763\n", - "Surface training t=2066, loss=0.48526301980018616\n", - "Surface training t=2067, loss=0.4358968883752823\n", - "Surface training t=2068, loss=0.4950833022594452\n", - "Surface training t=2069, loss=0.4152902811765671\n", - "Surface training t=2070, loss=0.5114808678627014\n", - "Surface training t=2071, loss=0.5138852894306183\n", - "Surface training t=2072, loss=0.48336076736450195\n", - "Surface training t=2073, loss=0.4769268333911896\n", - "Surface training t=2074, loss=0.47657062113285065\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=2075, loss=0.46488356590270996\n", - "Surface training t=2076, loss=0.5423241406679153\n", - "Surface training t=2077, loss=0.5007453411817551\n", - "Surface training t=2078, loss=0.4987400025129318\n", - "Surface training t=2079, loss=0.4745458662509918\n", - "Surface training t=2080, loss=0.4694027155637741\n", - "Surface training t=2081, loss=0.4929600656032562\n", - "Surface training t=2082, loss=0.46064721047878265\n", - "Surface 
training t=2083, loss=0.4592372477054596\n", - "Surface training t=2084, loss=0.4927379935979843\n", - "Surface training t=2085, loss=0.48948240280151367\n", - "Surface training t=2086, loss=0.4784228354692459\n", - "Surface training t=2087, loss=0.5000792443752289\n", - "Surface training t=2088, loss=0.43505507707595825\n", - "Surface training t=2089, loss=0.42575179040431976\n", - "Surface training t=2090, loss=0.4874122589826584\n", - "Surface training t=2091, loss=0.4729376435279846\n", - "Surface training t=2092, loss=0.5162804424762726\n", - "Surface training t=2093, loss=0.45672507584095\n", - "Surface training t=2094, loss=0.45309972763061523\n", - "Surface training t=2095, loss=0.5249010175466537\n", - "Surface training t=2096, loss=0.45743776857852936\n", - "Surface training t=2097, loss=0.42898818850517273\n", - "Surface training t=2098, loss=0.5454890877008438\n", - "Surface training t=2099, loss=0.46313971281051636\n", - "Surface training t=2100, loss=0.4857766926288605\n", - "Surface training t=2101, loss=0.49287913739681244\n", - "Surface training t=2102, loss=0.48002609610557556\n", - "Surface training t=2103, loss=0.47370612621307373\n", - "Surface training t=2104, loss=0.459817573428154\n", - "Surface training t=2105, loss=0.47454601526260376\n", - "Surface training t=2106, loss=0.4686594605445862\n", - "Surface training t=2107, loss=0.46781450510025024\n", - "Surface training t=2108, loss=0.48482823371887207\n", - "Surface training t=2109, loss=0.5147205740213394\n", - "Surface training t=2110, loss=0.49714140594005585\n", - "Surface training t=2111, loss=0.5191384702920914\n", - "Surface training t=2112, loss=0.45508289337158203\n", - "Surface training t=2113, loss=0.4975791573524475\n", - "Surface training t=2114, loss=0.5214228928089142\n", - "Surface training t=2115, loss=0.47849297523498535\n", - "Surface training t=2116, loss=0.522653266787529\n", - "Surface training t=2117, loss=0.4931619465351105\n", - "Surface training t=2118, loss=0.525516539812088\n", - "Surface training t=2119, loss=0.5071321874856949\n", - "Surface training t=2120, loss=0.49627557396888733\n", - "Surface training t=2121, loss=0.5057078748941422\n", - "Surface training t=2122, loss=0.5004344582557678\n", - "Surface training t=2123, loss=0.4533490091562271\n", - "Surface training t=2124, loss=0.45445020496845245\n", - "Surface training t=2125, loss=0.4919516444206238\n", - "Surface training t=2126, loss=0.5383554995059967\n", - "Surface training t=2127, loss=0.4935632348060608\n", - "Surface training t=2128, loss=0.4721735417842865\n", - "Surface training t=2129, loss=0.41917750239372253\n", - "Surface training t=2130, loss=0.4885101318359375\n", - "Surface training t=2131, loss=0.4865420013666153\n", - "Surface training t=2132, loss=0.48627978563308716\n", - "Surface training t=2133, loss=0.4629368185997009\n", - "Surface training t=2134, loss=0.4557785242795944\n", - "Surface training t=2135, loss=0.5004743188619614\n", - "Surface training t=2136, loss=0.44006970524787903\n", - "Surface training t=2137, loss=0.42503705620765686\n", - "Surface training t=2138, loss=0.42780904471874237\n", - "Surface training t=2139, loss=0.430559366941452\n", - "Surface training t=2140, loss=0.4868127107620239\n", - "Surface training t=2141, loss=0.45823128521442413\n", - "Surface training t=2142, loss=0.48163339495658875\n", - "Surface training t=2143, loss=0.47269317507743835\n", - "Surface training t=2144, loss=0.45766711235046387\n", - "Surface training t=2145, loss=0.46143537759780884\n", - "Surface 
- … [~1,300 lines of notebook stdout removed: "Surface training t=2146 … t=3460, loss=…" training-log output and the interleaved `"name": "stdout"` cell-boundary JSON; the reported loss drifts from ≈0.46 at t=2146 down to ≈0.41 by t≈3460] …
loss=0.42528408765792847\n", - "Surface training t=3461, loss=0.41396112740039825\n", - "Surface training t=3462, loss=0.39927075803279877\n", - "Surface training t=3463, loss=0.4275428056716919\n", - "Surface training t=3464, loss=0.40710215270519257\n", - "Surface training t=3465, loss=0.388123020529747\n", - "Surface training t=3466, loss=0.3824656307697296\n", - "Surface training t=3467, loss=0.46506139636039734\n", - "Surface training t=3468, loss=0.4113391935825348\n", - "Surface training t=3469, loss=0.4052019715309143\n", - "Surface training t=3470, loss=0.3628711700439453\n", - "Surface training t=3471, loss=0.4125394821166992\n", - "Surface training t=3472, loss=0.3728106915950775\n", - "Surface training t=3473, loss=0.3740396499633789\n", - "Surface training t=3474, loss=0.42864713072776794\n", - "Surface training t=3475, loss=0.41626840829849243\n", - "Surface training t=3476, loss=0.4209483116865158\n", - "Surface training t=3477, loss=0.3875781297683716\n", - "Surface training t=3478, loss=0.418461412191391\n", - "Surface training t=3479, loss=0.4325912743806839\n", - "Surface training t=3480, loss=0.413703978061676\n", - "Surface training t=3481, loss=0.4349515736103058\n", - "Surface training t=3482, loss=0.4020168036222458\n", - "Surface training t=3483, loss=0.3901880383491516\n", - "Surface training t=3484, loss=0.402623251080513\n", - "Surface training t=3485, loss=0.3600553572177887\n", - "Surface training t=3486, loss=0.38034312427043915\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=3487, loss=0.40228308737277985\n", - "Surface training t=3488, loss=0.41720297932624817\n", - "Surface training t=3489, loss=0.3733544796705246\n", - "Surface training t=3490, loss=0.3727937191724777\n", - "Surface training t=3491, loss=0.4424431622028351\n", - "Surface training t=3492, loss=0.40957750380039215\n", - "Surface training t=3493, loss=0.4078883230686188\n", - "Surface training t=3494, loss=0.40270504355430603\n", - "Surface training t=3495, loss=0.41943252086639404\n", - "Surface training t=3496, loss=0.3980149030685425\n", - "Surface training t=3497, loss=0.47766514122486115\n", - "Surface training t=3498, loss=0.39800719916820526\n", - "Surface training t=3499, loss=0.38983073830604553\n", - "Surface training t=3500, loss=0.4042108952999115\n", - "Surface training t=3501, loss=0.4414053410291672\n", - "Surface training t=3502, loss=0.41442902386188507\n", - "Surface training t=3503, loss=0.3841669261455536\n", - "Surface training t=3504, loss=0.3712655305862427\n", - "Surface training t=3505, loss=0.42662693560123444\n", - "Surface training t=3506, loss=0.40665242075920105\n", - "Surface training t=3507, loss=0.38735489547252655\n", - "Surface training t=3508, loss=0.4316225200891495\n", - "Surface training t=3509, loss=0.4260714501142502\n", - "Surface training t=3510, loss=0.41587962210178375\n", - "Surface training t=3511, loss=0.383186936378479\n", - "Surface training t=3512, loss=0.39787523448467255\n", - "Surface training t=3513, loss=0.37066029012203217\n", - "Surface training t=3514, loss=0.3925458937883377\n", - "Surface training t=3515, loss=0.40268294513225555\n", - "Surface training t=3516, loss=0.464722216129303\n", - "Surface training t=3517, loss=0.48489683866500854\n", - "Surface training t=3518, loss=0.3728925734758377\n", - "Surface training t=3519, loss=0.3834977298974991\n", - "Surface training t=3520, loss=0.3760637938976288\n", - "Surface training t=3521, loss=0.4318679869174957\n", - "Surface training 
t=3522, loss=0.43902595341205597\n", - "Surface training t=3523, loss=0.38303452730178833\n", - "Surface training t=3524, loss=0.3897535055875778\n", - "Surface training t=3525, loss=0.4085176885128021\n", - "Surface training t=3526, loss=0.4260907769203186\n", - "Surface training t=3527, loss=0.4313267171382904\n", - "Surface training t=3528, loss=0.4623508006334305\n", - "Surface training t=3529, loss=0.4090587943792343\n", - "Surface training t=3530, loss=0.37363460659980774\n", - "Surface training t=3531, loss=0.39021769165992737\n", - "Surface training t=3532, loss=0.3894852101802826\n", - "Surface training t=3533, loss=0.4444199353456497\n", - "Surface training t=3534, loss=0.34432409703731537\n", - "Surface training t=3535, loss=0.39843323826789856\n", - "Surface training t=3536, loss=0.41804929077625275\n", - "Surface training t=3537, loss=0.41934868693351746\n", - "Surface training t=3538, loss=0.5058681815862656\n", - "Surface training t=3539, loss=0.4162169247865677\n", - "Surface training t=3540, loss=0.351846843957901\n", - "Surface training t=3541, loss=0.402733251452446\n", - "Surface training t=3542, loss=0.36502134799957275\n", - "Surface training t=3543, loss=0.4021197259426117\n", - "Surface training t=3544, loss=0.45105360448360443\n", - "Surface training t=3545, loss=0.40385107696056366\n", - "Surface training t=3546, loss=0.45740818977355957\n", - "Surface training t=3547, loss=0.3539073318243027\n", - "Surface training t=3548, loss=0.3780725598335266\n", - "Surface training t=3549, loss=0.4138365834951401\n", - "Surface training t=3550, loss=0.41162411868572235\n", - "Surface training t=3551, loss=0.43180420994758606\n", - "Surface training t=3552, loss=0.4069443792104721\n", - "Surface training t=3553, loss=0.43183809518814087\n", - "Surface training t=3554, loss=0.38648684322834015\n", - "Surface training t=3555, loss=0.41699954867362976\n", - "Surface training t=3556, loss=0.4283899962902069\n", - "Surface training t=3557, loss=0.3937527984380722\n", - "Surface training t=3558, loss=0.3995814621448517\n", - "Surface training t=3559, loss=0.39965076744556427\n", - "Surface training t=3560, loss=0.37720775604248047\n", - "Surface training t=3561, loss=0.42534518241882324\n", - "Surface training t=3562, loss=0.43313464522361755\n", - "Surface training t=3563, loss=0.41597096621990204\n", - "Surface training t=3564, loss=0.38163919746875763\n", - "Surface training t=3565, loss=0.44182737171649933\n", - "Surface training t=3566, loss=0.41487841308116913\n", - "Surface training t=3567, loss=0.41729454696178436\n", - "Surface training t=3568, loss=0.3578677624464035\n", - "Surface training t=3569, loss=0.4331076145172119\n", - "Surface training t=3570, loss=0.3851621448993683\n", - "Surface training t=3571, loss=0.39203034341335297\n", - "Surface training t=3572, loss=0.39710159599781036\n", - "Surface training t=3573, loss=0.4316404312849045\n", - "Surface training t=3574, loss=0.386929526925087\n", - "Surface training t=3575, loss=0.45597583055496216\n", - "Surface training t=3576, loss=0.3455219268798828\n", - "Surface training t=3577, loss=0.4165022224187851\n", - "Surface training t=3578, loss=0.38123832643032074\n", - "Surface training t=3579, loss=0.40466785430908203\n", - "Surface training t=3580, loss=0.39123933017253876\n", - "Surface training t=3581, loss=0.3850105553865433\n", - "Surface training t=3582, loss=0.41390059888362885\n", - "Surface training t=3583, loss=0.378118634223938\n", - "Surface training t=3584, loss=0.4235777407884598\n", - "Surface 
training t=3585, loss=0.41543737053871155\n", - "Surface training t=3586, loss=0.4015570729970932\n", - "Surface training t=3587, loss=0.35980215668678284\n", - "Surface training t=3588, loss=0.40254826843738556\n", - "Surface training t=3589, loss=0.4081897586584091\n", - "Surface training t=3590, loss=0.40995945036411285\n", - "Surface training t=3591, loss=0.3896564394235611\n", - "Surface training t=3592, loss=0.4022190272808075\n", - "Surface training t=3593, loss=0.43299874663352966\n", - "Surface training t=3594, loss=0.39310064911842346\n", - "Surface training t=3595, loss=0.4328601658344269\n", - "Surface training t=3596, loss=0.3859991580247879\n", - "Surface training t=3597, loss=0.4370043873786926\n", - "Surface training t=3598, loss=0.37249214947223663\n", - "Surface training t=3599, loss=0.3933229148387909\n", - "Surface training t=3600, loss=0.4358699321746826\n", - "Surface training t=3601, loss=0.43389786779880524\n", - "Surface training t=3602, loss=0.4039624482393265\n", - "Surface training t=3603, loss=0.42377854883670807\n", - "Surface training t=3604, loss=0.4183361381292343\n", - "Surface training t=3605, loss=0.33039064705371857\n", - "Surface training t=3606, loss=0.3820832371711731\n", - "Surface training t=3607, loss=0.4426205903291702\n", - "Surface training t=3608, loss=0.39365053176879883\n", - "Surface training t=3609, loss=0.4195591062307358\n", - "Surface training t=3610, loss=0.3938687592744827\n", - "Surface training t=3611, loss=0.4481259882450104\n", - "Surface training t=3612, loss=0.4428499639034271\n", - "Surface training t=3613, loss=0.4255550056695938\n", - "Surface training t=3614, loss=0.41397541761398315\n", - "Surface training t=3615, loss=0.35993795096874237\n", - "Surface training t=3616, loss=0.44719281792640686\n", - "Surface training t=3617, loss=0.3975573778152466\n", - "Surface training t=3618, loss=0.46797293424606323\n", - "Surface training t=3619, loss=0.40079978108406067\n", - "Surface training t=3620, loss=0.3972943127155304\n", - "Surface training t=3621, loss=0.4131454825401306\n", - "Surface training t=3622, loss=0.43877893686294556\n", - "Surface training t=3623, loss=0.4470537453889847\n", - "Surface training t=3624, loss=0.37414321303367615\n", - "Surface training t=3625, loss=0.38507869839668274\n", - "Surface training t=3626, loss=0.4094766080379486\n", - "Surface training t=3627, loss=0.3898916095495224\n", - "Surface training t=3628, loss=0.3881508857011795\n", - "Surface training t=3629, loss=0.35818716883659363\n", - "Surface training t=3630, loss=0.4629838913679123\n", - "Surface training t=3631, loss=0.4538951516151428\n", - "Surface training t=3632, loss=0.42987994849681854\n", - "Surface training t=3633, loss=0.4019870311021805\n", - "Surface training t=3634, loss=0.39289338886737823\n", - "Surface training t=3635, loss=0.3569370359182358\n", - "Surface training t=3636, loss=0.3353290781378746\n", - "Surface training t=3637, loss=0.38829636573791504\n", - "Surface training t=3638, loss=0.39164917171001434\n", - "Surface training t=3639, loss=0.4026705473661423\n", - "Surface training t=3640, loss=0.4291002005338669\n", - "Surface training t=3641, loss=0.37447309494018555\n", - "Surface training t=3642, loss=0.3568629324436188\n", - "Surface training t=3643, loss=0.4362647980451584\n", - "Surface training t=3644, loss=0.3940742462873459\n", - "Surface training t=3645, loss=0.4053639620542526\n", - "Surface training t=3646, loss=0.4222984313964844\n", - "Surface training t=3647, loss=0.3903055638074875\n", - "Surface 
training t=3648, loss=0.42766907811164856\n", - "Surface training t=3649, loss=0.4207676351070404\n", - "Surface training t=3650, loss=0.3624044209718704\n", - "Surface training t=3651, loss=0.4719863831996918\n", - "Surface training t=3652, loss=0.423002153635025\n", - "Surface training t=3653, loss=0.3685721158981323\n", - "Surface training t=3654, loss=0.40746472775936127\n", - "Surface training t=3655, loss=0.4021735042333603\n", - "Surface training t=3656, loss=0.4391801655292511\n", - "Surface training t=3657, loss=0.45804761350154877\n", - "Surface training t=3658, loss=0.4425836205482483\n", - "Surface training t=3659, loss=0.4269881695508957\n", - "Surface training t=3660, loss=0.4299767017364502\n", - "Surface training t=3661, loss=0.3913196474313736\n", - "Surface training t=3662, loss=0.3637401759624481\n", - "Surface training t=3663, loss=0.39576759934425354\n", - "Surface training t=3664, loss=0.3838638961315155\n", - "Surface training t=3665, loss=0.4062506705522537\n", - "Surface training t=3666, loss=0.3986184298992157\n", - "Surface training t=3667, loss=0.3852640837430954\n", - "Surface training t=3668, loss=0.35523344576358795\n", - "Surface training t=3669, loss=0.41889359056949615\n", - "Surface training t=3670, loss=0.4066902846097946\n", - "Surface training t=3671, loss=0.38025297224521637\n", - "Surface training t=3672, loss=0.4344877451658249\n", - "Surface training t=3673, loss=0.4675385355949402\n", - "Surface training t=3674, loss=0.42479555308818817\n", - "Surface training t=3675, loss=0.41397911310195923\n", - "Surface training t=3676, loss=0.37401147186756134\n", - "Surface training t=3677, loss=0.400039941072464\n", - "Surface training t=3678, loss=0.3806566298007965\n", - "Surface training t=3679, loss=0.37736837565898895\n", - "Surface training t=3680, loss=0.38147488236427307\n", - "Surface training t=3681, loss=0.3978186398744583\n", - "Surface training t=3682, loss=0.3969861567020416\n", - "Surface training t=3683, loss=0.36411498486995697\n", - "Surface training t=3684, loss=0.4341927170753479\n", - "Surface training t=3685, loss=0.43571290373802185\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=3686, loss=0.4614860564470291\n", - "Surface training t=3687, loss=0.4128866344690323\n", - "Surface training t=3688, loss=0.4465310871601105\n", - "Surface training t=3689, loss=0.35154400765895844\n", - "Surface training t=3690, loss=0.418986976146698\n", - "Surface training t=3691, loss=0.40213197469711304\n", - "Surface training t=3692, loss=0.39991454780101776\n", - "Surface training t=3693, loss=0.4119623899459839\n", - "Surface training t=3694, loss=0.3798840641975403\n", - "Surface training t=3695, loss=0.35434170067310333\n", - "Surface training t=3696, loss=0.40182188153266907\n", - "Surface training t=3697, loss=0.3714784234762192\n", - "Surface training t=3698, loss=0.34777551889419556\n", - "Surface training t=3699, loss=0.36471064388751984\n", - "Surface training t=3700, loss=0.38661518692970276\n", - "Surface training t=3701, loss=0.36750657856464386\n", - "Surface training t=3702, loss=0.43145619332790375\n", - "Surface training t=3703, loss=0.41595734655857086\n", - "Surface training t=3704, loss=0.41410768032073975\n", - "Surface training t=3705, loss=0.3772621303796768\n", - "Surface training t=3706, loss=0.42050181329250336\n", - "Surface training t=3707, loss=0.3864767849445343\n", - "Surface training t=3708, loss=0.4159497320652008\n", - "Surface training t=3709, 
loss=0.3811473697423935\n", - "Surface training t=3710, loss=0.3705185502767563\n", - "Surface training t=3711, loss=0.3977174907922745\n", - "Surface training t=3712, loss=0.3873118609189987\n", - "Surface training t=3713, loss=0.41153278946876526\n", - "Surface training t=3714, loss=0.41571810841560364\n", - "Surface training t=3715, loss=0.4008481204509735\n", - "Surface training t=3716, loss=0.4436929374933243\n", - "Surface training t=3717, loss=0.4201730191707611\n", - "Surface training t=3718, loss=0.4119427353143692\n", - "Surface training t=3719, loss=0.40684159100055695\n", - "Surface training t=3720, loss=0.37326158583164215\n", - "Surface training t=3721, loss=0.4404856711626053\n", - "Surface training t=3722, loss=0.42768992483615875\n", - "Surface training t=3723, loss=0.384817898273468\n", - "Surface training t=3724, loss=0.41057121753692627\n", - "Surface training t=3725, loss=0.41818128526210785\n", - "Surface training t=3726, loss=0.43001367151737213\n", - "Surface training t=3727, loss=0.4015935957431793\n", - "Surface training t=3728, loss=0.3460698425769806\n", - "Surface training t=3729, loss=0.3717762231826782\n", - "Surface training t=3730, loss=0.35580874979496\n", - "Surface training t=3731, loss=0.40974724292755127\n", - "Surface training t=3732, loss=0.36200766265392303\n", - "Surface training t=3733, loss=0.4097868800163269\n", - "Surface training t=3734, loss=0.4173985868692398\n", - "Surface training t=3735, loss=0.36462023854255676\n", - "Surface training t=3736, loss=0.4431215524673462\n", - "Surface training t=3737, loss=0.42679961025714874\n", - "Surface training t=3738, loss=0.36411114037036896\n", - "Surface training t=3739, loss=0.41382794082164764\n", - "Surface training t=3740, loss=0.40376000106334686\n", - "Surface training t=3741, loss=0.41952575743198395\n", - "Surface training t=3742, loss=0.4667031019926071\n", - "Surface training t=3743, loss=0.4293315261602402\n", - "Surface training t=3744, loss=0.43884240090847015\n", - "Surface training t=3745, loss=0.3699595332145691\n", - "Surface training t=3746, loss=0.39469246566295624\n", - "Surface training t=3747, loss=0.36471299827098846\n", - "Surface training t=3748, loss=0.4234427362680435\n", - "Surface training t=3749, loss=0.436571940779686\n", - "Surface training t=3750, loss=0.3907364308834076\n", - "Surface training t=3751, loss=0.4536646604537964\n", - "Surface training t=3752, loss=0.40696652233600616\n", - "Surface training t=3753, loss=0.36122438311576843\n", - "Surface training t=3754, loss=0.40497928857803345\n", - "Surface training t=3755, loss=0.36988280713558197\n", - "Surface training t=3756, loss=0.344209685921669\n", - "Surface training t=3757, loss=0.38695961236953735\n", - "Surface training t=3758, loss=0.40397968888282776\n", - "Surface training t=3759, loss=0.3802668899297714\n", - "Surface training t=3760, loss=0.49184660613536835\n", - "Surface training t=3761, loss=0.3737316280603409\n", - "Surface training t=3762, loss=0.42562222480773926\n", - "Surface training t=3763, loss=0.39880433678627014\n", - "Surface training t=3764, loss=0.41031210124492645\n", - "Surface training t=3765, loss=0.3954891562461853\n", - "Surface training t=3766, loss=0.42809709906578064\n", - "Surface training t=3767, loss=0.4144077003002167\n", - "Surface training t=3768, loss=0.4025924801826477\n", - "Surface training t=3769, loss=0.41385112702846527\n", - "Surface training t=3770, loss=0.42184846103191376\n", - "Surface training t=3771, loss=0.42711469531059265\n", - "Surface training 
t=3772, loss=0.46526627242565155\n", - "Surface training t=3773, loss=0.41075606644153595\n", - "Surface training t=3774, loss=0.3901883065700531\n", - "Surface training t=3775, loss=0.39348727464675903\n", - "Surface training t=3776, loss=0.3715956509113312\n", - "Surface training t=3777, loss=0.4631786495447159\n", - "Surface training t=3778, loss=0.4253021329641342\n", - "Surface training t=3779, loss=0.3732531666755676\n", - "Surface training t=3780, loss=0.4069055914878845\n", - "Surface training t=3781, loss=0.44440214335918427\n", - "Surface training t=3782, loss=0.4069933146238327\n", - "Surface training t=3783, loss=0.40050238370895386\n", - "Surface training t=3784, loss=0.375227689743042\n", - "Surface training t=3785, loss=0.4167502075433731\n", - "Surface training t=3786, loss=0.41265465319156647\n", - "Surface training t=3787, loss=0.37637875974178314\n", - "Surface training t=3788, loss=0.40780700743198395\n", - "Surface training t=3789, loss=0.3777640014886856\n", - "Surface training t=3790, loss=0.37646709382534027\n", - "Surface training t=3791, loss=0.40408240258693695\n", - "Surface training t=3792, loss=0.40129703283309937\n", - "Surface training t=3793, loss=0.41591258347034454\n", - "Surface training t=3794, loss=0.4249241501092911\n", - "Surface training t=3795, loss=0.38835394382476807\n", - "Surface training t=3796, loss=0.39470410346984863\n", - "Surface training t=3797, loss=0.41432857513427734\n", - "Surface training t=3798, loss=0.42496544122695923\n", - "Surface training t=3799, loss=0.404966801404953\n", - "Surface training t=3800, loss=0.4014527350664139\n", - "Surface training t=3801, loss=0.35146912932395935\n", - "Surface training t=3802, loss=0.43449005484580994\n", - "Surface training t=3803, loss=0.412505567073822\n", - "Surface training t=3804, loss=0.42299938201904297\n", - "Surface training t=3805, loss=0.43570955097675323\n", - "Surface training t=3806, loss=0.38713520765304565\n", - "Surface training t=3807, loss=0.44605299830436707\n", - "Surface training t=3808, loss=0.3923485279083252\n", - "Surface training t=3809, loss=0.40696685016155243\n", - "Surface training t=3810, loss=0.3788508474826813\n", - "Surface training t=3811, loss=0.4475942403078079\n", - "Surface training t=3812, loss=0.3966699689626694\n", - "Surface training t=3813, loss=0.41375432908535004\n", - "Surface training t=3814, loss=0.40462371706962585\n", - "Surface training t=3815, loss=0.41098831593990326\n", - "Surface training t=3816, loss=0.4353414475917816\n", - "Surface training t=3817, loss=0.38532155752182007\n", - "Surface training t=3818, loss=0.37299612164497375\n", - "Surface training t=3819, loss=0.37619951367378235\n", - "Surface training t=3820, loss=0.4158749580383301\n", - "Surface training t=3821, loss=0.4007754623889923\n", - "Surface training t=3822, loss=0.39284248650074005\n", - "Surface training t=3823, loss=0.34728942811489105\n", - "Surface training t=3824, loss=0.4333714246749878\n", - "Surface training t=3825, loss=0.37832868099212646\n", - "Surface training t=3826, loss=0.38941168785095215\n", - "Surface training t=3827, loss=0.413541316986084\n", - "Surface training t=3828, loss=0.3649388700723648\n", - "Surface training t=3829, loss=0.3713937997817993\n", - "Surface training t=3830, loss=0.3162544444203377\n", - "Surface training t=3831, loss=0.3923357129096985\n", - "Surface training t=3832, loss=0.3590468019247055\n", - "Surface training t=3833, loss=0.3981168568134308\n", - "Surface training t=3834, loss=0.388483390212059\n", - "Surface 
training t=3835, loss=0.395929753780365\n", - "Surface training t=3836, loss=0.3535633534193039\n", - "Surface training t=3837, loss=0.4047149270772934\n", - "Surface training t=3838, loss=0.362693190574646\n", - "Surface training t=3839, loss=0.36918848752975464\n", - "Surface training t=3840, loss=0.3925859034061432\n", - "Surface training t=3841, loss=0.3977292776107788\n", - "Surface training t=3842, loss=0.41250234842300415\n", - "Surface training t=3843, loss=0.3902607858181\n", - "Surface training t=3844, loss=0.4081851691007614\n", - "Surface training t=3845, loss=0.40367482602596283\n", - "Surface training t=3846, loss=0.3772220015525818\n", - "Surface training t=3847, loss=0.41526296734809875\n", - "Surface training t=3848, loss=0.42244723439216614\n", - "Surface training t=3849, loss=0.4207916110754013\n", - "Surface training t=3850, loss=0.4143332690000534\n", - "Surface training t=3851, loss=0.37096546590328217\n", - "Surface training t=3852, loss=0.3638824373483658\n", - "Surface training t=3853, loss=0.39341261982917786\n", - "Surface training t=3854, loss=0.4447659105062485\n", - "Surface training t=3855, loss=0.42821477353572845\n", - "Surface training t=3856, loss=0.40236884355545044\n", - "Surface training t=3857, loss=0.3870856463909149\n", - "Surface training t=3858, loss=0.38243550062179565\n", - "Surface training t=3859, loss=0.3531835079193115\n", - "Surface training t=3860, loss=0.3712564557790756\n", - "Surface training t=3861, loss=0.32792410999536514\n", - "Surface training t=3862, loss=0.4590943604707718\n", - "Surface training t=3863, loss=0.36978791654109955\n", - "Surface training t=3864, loss=0.39221037924289703\n", - "Surface training t=3865, loss=0.3654337525367737\n", - "Surface training t=3866, loss=0.34084323048591614\n", - "Surface training t=3867, loss=0.33294765651226044\n", - "Surface training t=3868, loss=0.36370837688446045\n", - "Surface training t=3869, loss=0.3843619078397751\n", - "Surface training t=3870, loss=0.4135150909423828\n", - "Surface training t=3871, loss=0.373654842376709\n", - "Surface training t=3872, loss=0.4481412172317505\n", - "Surface training t=3873, loss=0.37121425569057465\n", - "Surface training t=3874, loss=0.36227135360240936\n", - "Surface training t=3875, loss=0.38177230954170227\n", - "Surface training t=3876, loss=0.416293740272522\n", - "Surface training t=3877, loss=0.38004907965660095\n", - "Surface training t=3878, loss=0.38429637253284454\n", - "Surface training t=3879, loss=0.3485984653234482\n", - "Surface training t=3880, loss=0.34944023191928864\n", - "Surface training t=3881, loss=0.3675970584154129\n", - "Surface training t=3882, loss=0.42056208848953247\n", - "Surface training t=3883, loss=0.3694578856229782\n", - "Surface training t=3884, loss=0.40312008559703827\n", - "Surface training t=3885, loss=0.4315395951271057\n", - "Surface training t=3886, loss=0.4139598459005356\n", - "Surface training t=3887, loss=0.39058759808540344\n", - "Surface training t=3888, loss=0.40753862261772156\n", - "Surface training t=3889, loss=0.41365547478199005\n", - "Surface training t=3890, loss=0.35065409541130066\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=3891, loss=0.382595494389534\n", - "Surface training t=3892, loss=0.3566640615463257\n", - "Surface training t=3893, loss=0.3998414874076843\n", - "Surface training t=3894, loss=0.42439933121204376\n", - "Surface training t=3895, loss=0.4387615919113159\n", - "Surface training t=3896, loss=0.3908649682998657\n", - 
"Surface training t=3897, loss=0.3950778990983963\n", - "Surface training t=3898, loss=0.4337518513202667\n", - "Surface training t=3899, loss=0.365881010890007\n", - "Surface training t=3900, loss=0.3963879197835922\n", - "Surface training t=3901, loss=0.3779745548963547\n", - "Surface training t=3902, loss=0.4579348564147949\n", - "Surface training t=3903, loss=0.37012559175491333\n", - "Surface training t=3904, loss=0.3796849548816681\n", - "Surface training t=3905, loss=0.39015568792819977\n", - "Surface training t=3906, loss=0.43914443254470825\n", - "Surface training t=3907, loss=0.4375288188457489\n", - "Surface training t=3908, loss=0.4303712546825409\n", - "Surface training t=3909, loss=0.3774087727069855\n", - "Surface training t=3910, loss=0.37381233274936676\n", - "Surface training t=3911, loss=0.36021655797958374\n", - "Surface training t=3912, loss=0.36969299614429474\n", - "Surface training t=3913, loss=0.383622869849205\n", - "Surface training t=3914, loss=0.38945919275283813\n", - "Surface training t=3915, loss=0.34193308651447296\n", - "Surface training t=3916, loss=0.41289372742176056\n", - "Surface training t=3917, loss=0.43605104088783264\n", - "Surface training t=3918, loss=0.4117777645587921\n", - "Surface training t=3919, loss=0.39850129187107086\n", - "Surface training t=3920, loss=0.39629845321178436\n", - "Surface training t=3921, loss=0.39605532586574554\n", - "Surface training t=3922, loss=0.43914756178855896\n", - "Surface training t=3923, loss=0.3824612647294998\n", - "Surface training t=3924, loss=0.42861656844615936\n", - "Surface training t=3925, loss=0.4182557910680771\n", - "Surface training t=3926, loss=0.43684227764606476\n", - "Surface training t=3927, loss=0.3505563884973526\n", - "Surface training t=3928, loss=0.4151829332113266\n", - "Surface training t=3929, loss=0.4309549033641815\n", - "Surface training t=3930, loss=0.39471442997455597\n", - "Surface training t=3931, loss=0.4535412937402725\n", - "Surface training t=3932, loss=0.40390630066394806\n", - "Surface training t=3933, loss=0.38810895383358\n", - "Surface training t=3934, loss=0.35541287064552307\n", - "Surface training t=3935, loss=0.3631506413221359\n", - "Surface training t=3936, loss=0.38959619402885437\n", - "Surface training t=3937, loss=0.3456219583749771\n", - "Surface training t=3938, loss=0.4245997965335846\n", - "Surface training t=3939, loss=0.4389762729406357\n", - "Surface training t=3940, loss=0.4158787280321121\n", - "Surface training t=3941, loss=0.41510817408561707\n", - "Surface training t=3942, loss=0.3934088498353958\n", - "Surface training t=3943, loss=0.41329166293144226\n", - "Surface training t=3944, loss=0.37385083734989166\n", - "Surface training t=3945, loss=0.38788585364818573\n", - "Surface training t=3946, loss=0.386840358376503\n", - "Surface training t=3947, loss=0.37978649139404297\n", - "Surface training t=3948, loss=0.4330186992883682\n", - "Surface training t=3949, loss=0.432710736989975\n", - "Surface training t=3950, loss=0.40179505944252014\n", - "Surface training t=3951, loss=0.4228330999612808\n", - "Surface training t=3952, loss=0.4243570864200592\n", - "Surface training t=3953, loss=0.3699745237827301\n", - "Surface training t=3954, loss=0.3642343580722809\n", - "Surface training t=3955, loss=0.3716723322868347\n", - "Surface training t=3956, loss=0.4569324553012848\n", - "Surface training t=3957, loss=0.3993402123451233\n", - "Surface training t=3958, loss=0.39399857819080353\n", - "Surface training t=3959, loss=0.376907542347908\n", - 
"Surface training t=3960, loss=0.39444904029369354\n", - "Surface training t=3961, loss=0.4187479615211487\n", - "Surface training t=3962, loss=0.3706560283899307\n", - "Surface training t=3963, loss=0.3876808434724808\n", - "Surface training t=3964, loss=0.39292606711387634\n", - "Surface training t=3965, loss=0.4061991423368454\n", - "Surface training t=3966, loss=0.44984787702560425\n", - "Surface training t=3967, loss=0.3446061611175537\n", - "Surface training t=3968, loss=0.34656041860580444\n", - "Surface training t=3969, loss=0.37805232405662537\n", - "Surface training t=3970, loss=0.4068017154932022\n", - "Surface training t=3971, loss=0.38410866260528564\n", - "Surface training t=3972, loss=0.3877701014280319\n", - "Surface training t=3973, loss=0.34584277868270874\n", - "Surface training t=3974, loss=0.378815233707428\n", - "Surface training t=3975, loss=0.4185832291841507\n", - "Surface training t=3976, loss=0.47080838680267334\n", - "Surface training t=3977, loss=0.42339618504047394\n", - "Surface training t=3978, loss=0.3918813169002533\n", - "Surface training t=3979, loss=0.4075955003499985\n", - "Surface training t=3980, loss=0.3716157227754593\n", - "Surface training t=3981, loss=0.3353898674249649\n", - "Surface training t=3982, loss=0.4159669578075409\n", - "Surface training t=3983, loss=0.3521314561367035\n", - "Surface training t=3984, loss=0.3344568908214569\n", - "Surface training t=3985, loss=0.3833710849285126\n", - "Surface training t=3986, loss=0.3508526682853699\n", - "Surface training t=3987, loss=0.37997449934482574\n", - "Surface training t=3988, loss=0.39501073956489563\n", - "Surface training t=3989, loss=0.3567165583372116\n", - "Surface training t=3990, loss=0.37947559356689453\n", - "Surface training t=3991, loss=0.43692079186439514\n", - "Surface training t=3992, loss=0.34080010652542114\n", - "Surface training t=3993, loss=0.3960205018520355\n", - "Surface training t=3994, loss=0.39740996062755585\n", - "Surface training t=3995, loss=0.3756101429462433\n", - "Surface training t=3996, loss=0.3641689121723175\n", - "Surface training t=3997, loss=0.3583558052778244\n", - "Surface training t=3998, loss=0.36836858093738556\n", - "Surface training t=3999, loss=0.3435385674238205\n", - "Surface training t=4000, loss=0.409637987613678\n", - "Surface training t=4001, loss=0.38892732560634613\n", - "Surface training t=4002, loss=0.4270562529563904\n", - "Surface training t=4003, loss=0.4058266878128052\n", - "Surface training t=4004, loss=0.37067826092243195\n", - "Surface training t=4005, loss=0.3254050090909004\n", - "Surface training t=4006, loss=0.35168980062007904\n", - "Surface training t=4007, loss=0.3296745866537094\n", - "Surface training t=4008, loss=0.37882469594478607\n", - "Surface training t=4009, loss=0.3507038652896881\n", - "Surface training t=4010, loss=0.3357045203447342\n", - "Surface training t=4011, loss=0.38330164551734924\n", - "Surface training t=4012, loss=0.42293302714824677\n", - "Surface training t=4013, loss=0.36648617684841156\n", - "Surface training t=4014, loss=0.3634517043828964\n", - "Surface training t=4015, loss=0.351458802819252\n", - "Surface training t=4016, loss=0.4743202179670334\n", - "Surface training t=4017, loss=0.40084798634052277\n", - "Surface training t=4018, loss=0.44031384587287903\n", - "Surface training t=4019, loss=0.3767589330673218\n", - "Surface training t=4020, loss=0.3844790607690811\n", - "Surface training t=4021, loss=0.3724527060985565\n", - "Surface training t=4022, loss=0.4045083820819855\n", - 
"Surface training t=4023, loss=0.37398387491703033\n", - "Surface training t=4024, loss=0.3809877783060074\n", - "Surface training t=4025, loss=0.3374379277229309\n", - "Surface training t=4026, loss=0.43137986958026886\n", - "Surface training t=4027, loss=0.3759382367134094\n", - "Surface training t=4028, loss=0.35098719596862793\n", - "Surface training t=4029, loss=0.3728739470243454\n", - "Surface training t=4030, loss=0.34928378462791443\n", - "Surface training t=4031, loss=0.39423152804374695\n", - "Surface training t=4032, loss=0.38743099570274353\n", - "Surface training t=4033, loss=0.416525736451149\n", - "Surface training t=4034, loss=0.38467973470687866\n", - "Surface training t=4035, loss=0.38857659697532654\n", - "Surface training t=4036, loss=0.4353574067354202\n", - "Surface training t=4037, loss=0.3887976408004761\n", - "Surface training t=4038, loss=0.39335185289382935\n", - "Surface training t=4039, loss=0.38406993448734283\n", - "Surface training t=4040, loss=0.373444139957428\n", - "Surface training t=4041, loss=0.38323958218097687\n", - "Surface training t=4042, loss=0.3265801966190338\n", - "Surface training t=4043, loss=0.39211365580558777\n", - "Surface training t=4044, loss=0.38867422938346863\n", - "Surface training t=4045, loss=0.3873738497495651\n", - "Surface training t=4046, loss=0.43757757544517517\n", - "Surface training t=4047, loss=0.3677407056093216\n", - "Surface training t=4048, loss=0.4077727198600769\n", - "Surface training t=4049, loss=0.3965855538845062\n", - "Surface training t=4050, loss=0.3979056179523468\n", - "Surface training t=4051, loss=0.35740913450717926\n", - "Surface training t=4052, loss=0.3610760420560837\n", - "Surface training t=4053, loss=0.3967045396566391\n", - "Surface training t=4054, loss=0.36939017474651337\n", - "Surface training t=4055, loss=0.4434190094470978\n", - "Surface training t=4056, loss=0.43022456765174866\n", - "Surface training t=4057, loss=0.39645953476428986\n", - "Surface training t=4058, loss=0.39086538553237915\n", - "Surface training t=4059, loss=0.39733119308948517\n", - "Surface training t=4060, loss=0.36998818814754486\n", - "Surface training t=4061, loss=0.3852626532316208\n", - "Surface training t=4062, loss=0.38113299012184143\n", - "Surface training t=4063, loss=0.37375055253505707\n", - "Surface training t=4064, loss=0.40432092547416687\n", - "Surface training t=4065, loss=0.36766569316387177\n", - "Surface training t=4066, loss=0.3465363681316376\n", - "Surface training t=4067, loss=0.4257071912288666\n", - "Surface training t=4068, loss=0.3449620008468628\n", - "Surface training t=4069, loss=0.34773704409599304\n", - "Surface training t=4070, loss=0.36228621006011963\n", - "Surface training t=4071, loss=0.3666769117116928\n", - "Surface training t=4072, loss=0.4189414530992508\n", - "Surface training t=4073, loss=0.4410899579524994\n", - "Surface training t=4074, loss=0.38597579300403595\n", - "Surface training t=4075, loss=0.39998768270015717\n", - "Surface training t=4076, loss=0.3465607762336731\n", - "Surface training t=4077, loss=0.3859240859746933\n", - "Surface training t=4078, loss=0.3571488857269287\n", - "Surface training t=4079, loss=0.3607383519411087\n", - "Surface training t=4080, loss=0.38721780478954315\n", - "Surface training t=4081, loss=0.37213464081287384\n", - "Surface training t=4082, loss=0.4252523332834244\n", - "Surface training t=4083, loss=0.36385639011859894\n", - "Surface training t=4084, loss=0.3973305821418762\n", - "Surface training t=4085, 
loss=0.3575633019208908\n", - "Surface training t=4086, loss=0.36962638795375824\n", - "Surface training t=4087, loss=0.3753281831741333\n", - "Surface training t=4088, loss=0.4019504189491272\n", - "Surface training t=4089, loss=0.4139963537454605\n", - "Surface training t=4090, loss=0.3780279606580734\n", - "Surface training t=4091, loss=0.3772754669189453\n", - "Surface training t=4092, loss=0.3817938417196274\n", - "Surface training t=4093, loss=0.4328308701515198\n", - "Surface training t=4094, loss=0.45881161093711853\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=4095, loss=0.41994403302669525\n", - "Surface training t=4096, loss=0.39601004123687744\n", - "Surface training t=4097, loss=0.3545200079679489\n", - "Surface training t=4098, loss=0.4010414034128189\n", - "Surface training t=4099, loss=0.4276617467403412\n", - "Surface training t=4100, loss=0.36783313751220703\n", - "Surface training t=4101, loss=0.4488302767276764\n", - "Surface training t=4102, loss=0.35793887078762054\n", - "Surface training t=4103, loss=0.43075820803642273\n", - "Surface training t=4104, loss=0.43811123073101044\n", - "Surface training t=4105, loss=0.4268387407064438\n", - "Surface training t=4106, loss=0.349450945854187\n", - "Surface training t=4107, loss=0.43594884872436523\n", - "Surface training t=4108, loss=0.4391603618860245\n", - "Surface training t=4109, loss=0.4291580021381378\n", - "Surface training t=4110, loss=0.4475468695163727\n", - "Surface training t=4111, loss=0.47509847581386566\n", - "Surface training t=4112, loss=0.41697831451892853\n", - "Surface training t=4113, loss=0.3822086751461029\n", - "Surface training t=4114, loss=0.39252781867980957\n", - "Surface training t=4115, loss=0.40391072630882263\n", - "Surface training t=4116, loss=0.37091633677482605\n", - "Surface training t=4117, loss=0.39232976734638214\n", - "Surface training t=4118, loss=0.4048681855201721\n", - "Surface training t=4119, loss=0.3529878705739975\n", - "Surface training t=4120, loss=0.3426922410726547\n", - "Surface training t=4121, loss=0.4253324270248413\n", - "Surface training t=4122, loss=0.4020538330078125\n", - "Surface training t=4123, loss=0.3909311890602112\n", - "Surface training t=4124, loss=0.41557714343070984\n", - "Surface training t=4125, loss=0.34813132882118225\n", - "Surface training t=4126, loss=0.38773106038570404\n", - "Surface training t=4127, loss=0.3721057176589966\n", - "Surface training t=4128, loss=0.42544397711753845\n", - "Surface training t=4129, loss=0.42727823555469513\n", - "Surface training t=4130, loss=0.3973812460899353\n", - "Surface training t=4131, loss=0.40755386650562286\n", - "Surface training t=4132, loss=0.38877080380916595\n", - "Surface training t=4133, loss=0.3599332123994827\n", - "Surface training t=4134, loss=0.32269053161144257\n", - "Surface training t=4135, loss=0.37859803438186646\n", - "Surface training t=4136, loss=0.4064124524593353\n", - "Surface training t=4137, loss=0.39740459620952606\n", - "Surface training t=4138, loss=0.3626261204481125\n", - "Surface training t=4139, loss=0.39855483174324036\n", - "Surface training t=4140, loss=0.40231335163116455\n", - "Surface training t=4141, loss=0.42325013875961304\n", - "Surface training t=4142, loss=0.38463257253170013\n", - "Surface training t=4143, loss=0.366448774933815\n", - "Surface training t=4144, loss=0.3800498694181442\n", - "Surface training t=4145, loss=0.3542203903198242\n", - "Surface training t=4146, loss=0.3474828749895096\n", - "Surface 
training t=4147, loss=0.36078429222106934\n", - "Surface training t=4148, loss=0.37698711454868317\n", - "Surface training t=4149, loss=0.3963042050600052\n", - "Surface training t=4150, loss=0.36357901990413666\n", - "Surface training t=4151, loss=0.3718159645795822\n", - "Surface training t=4152, loss=0.4092058539390564\n", - "Surface training t=4153, loss=0.35153262317180634\n", - "Surface training t=4154, loss=0.42049872875213623\n", - "Surface training t=4155, loss=0.3856717497110367\n", - "Surface training t=4156, loss=0.3175186365842819\n", - "Surface training t=4157, loss=0.36749987304210663\n", - "Surface training t=4158, loss=0.3502698093652725\n", - "Surface training t=4159, loss=0.34319645166397095\n", - "Surface training t=4160, loss=0.3721591681241989\n", - "Surface training t=4161, loss=0.3698679059743881\n", - "Surface training t=4162, loss=0.3867473900318146\n", - "Surface training t=4163, loss=0.34956149756908417\n", - "Surface training t=4164, loss=0.3543694466352463\n", - "Surface training t=4165, loss=0.35541488230228424\n", - "Surface training t=4166, loss=0.41137368977069855\n", - "Surface training t=4167, loss=0.3678307384252548\n", - "Surface training t=4168, loss=0.37762030959129333\n", - "Surface training t=4169, loss=0.3714462220668793\n", - "Surface training t=4170, loss=0.3581669479608536\n", - "Surface training t=4171, loss=0.3692961037158966\n", - "Surface training t=4172, loss=0.4193257987499237\n", - "Surface training t=4173, loss=0.37316685914993286\n", - "Surface training t=4174, loss=0.3735458254814148\n", - "Surface training t=4175, loss=0.36882010102272034\n", - "Surface training t=4176, loss=0.3998199850320816\n", - "Surface training t=4177, loss=0.3797539323568344\n", - "Surface training t=4178, loss=0.39830824732780457\n", - "Surface training t=4179, loss=0.3956163376569748\n", - "Surface training t=4180, loss=0.37247322499752045\n", - "Surface training t=4181, loss=0.38055330514907837\n", - "Surface training t=4182, loss=0.40685053169727325\n", - "Surface training t=4183, loss=0.35422077775001526\n", - "Surface training t=4184, loss=0.36972078680992126\n", - "Surface training t=4185, loss=0.45684246718883514\n", - "Surface training t=4186, loss=0.3901918828487396\n", - "Surface training t=4187, loss=0.36839255690574646\n", - "Surface training t=4188, loss=0.3414861857891083\n", - "Surface training t=4189, loss=0.3951801210641861\n", - "Surface training t=4190, loss=0.3494921028614044\n", - "Surface training t=4191, loss=0.3527307063341141\n", - "Surface training t=4192, loss=0.40332140028476715\n", - "Surface training t=4193, loss=0.4383063316345215\n", - "Surface training t=4194, loss=0.32969439029693604\n", - "Surface training t=4195, loss=0.4088033586740494\n", - "Surface training t=4196, loss=0.36945943534374237\n", - "Surface training t=4197, loss=0.38277022540569305\n", - "Surface training t=4198, loss=0.37135469913482666\n", - "Surface training t=4199, loss=0.3509122133255005\n", - "Surface training t=4200, loss=0.3685434013605118\n", - "Surface training t=4201, loss=0.4316415935754776\n", - "Surface training t=4202, loss=0.3265676125884056\n", - "Surface training t=4203, loss=0.4227770119905472\n", - "Surface training t=4204, loss=0.3956560641527176\n", - "Surface training t=4205, loss=0.3766481727361679\n", - "Surface training t=4206, loss=0.4196038842201233\n", - "Surface training t=4207, loss=0.38544370234012604\n", - "Surface training t=4208, loss=0.39068926870822906\n", - "Surface training t=4209, loss=0.40415629744529724\n", - 
"Surface training t=4210, loss=0.4677385985851288\n", - "Surface training t=4211, loss=0.3539865016937256\n", - "Surface training t=4212, loss=0.3675006479024887\n", - "Surface training t=4213, loss=0.4006839990615845\n", - "Surface training t=4214, loss=0.37274913489818573\n", - "Surface training t=4215, loss=0.40927964448928833\n", - "Surface training t=4216, loss=0.3708546906709671\n", - "Surface training t=4217, loss=0.3998330533504486\n", - "Surface training t=4218, loss=0.35461197793483734\n", - "Surface training t=4219, loss=0.3931937515735626\n", - "Surface training t=4220, loss=0.3133840039372444\n", - "Surface training t=4221, loss=0.36659227311611176\n", - "Surface training t=4222, loss=0.3409139811992645\n", - "Surface training t=4223, loss=0.36081068217754364\n", - "Surface training t=4224, loss=0.43566958606243134\n", - "Surface training t=4225, loss=0.35886722803115845\n", - "Surface training t=4226, loss=0.4151398688554764\n", - "Surface training t=4227, loss=0.36254939436912537\n", - "Surface training t=4228, loss=0.3685190975666046\n", - "Surface training t=4229, loss=0.37547969818115234\n", - "Surface training t=4230, loss=0.37221407890319824\n", - "Surface training t=4231, loss=0.3747726082801819\n", - "Surface training t=4232, loss=0.4145944267511368\n", - "Surface training t=4233, loss=0.32387077808380127\n", - "Surface training t=4234, loss=0.3859538584947586\n", - "Surface training t=4235, loss=0.4018137902021408\n", - "Surface training t=4236, loss=0.3604001849889755\n", - "Surface training t=4237, loss=0.40909039974212646\n", - "Surface training t=4238, loss=0.3869653344154358\n", - "Surface training t=4239, loss=0.400825560092926\n", - "Surface training t=4240, loss=0.3525598645210266\n", - "Surface training t=4241, loss=0.35683633387088776\n", - "Surface training t=4242, loss=0.32306596636772156\n", - "Surface training t=4243, loss=0.35187433660030365\n", - "Surface training t=4244, loss=0.38634093105793\n", - "Surface training t=4245, loss=0.39483949542045593\n", - "Surface training t=4246, loss=0.41561809182167053\n", - "Surface training t=4247, loss=0.3929671049118042\n", - "Surface training t=4248, loss=0.37398913502693176\n", - "Surface training t=4249, loss=0.3760479837656021\n", - "Surface training t=4250, loss=0.35255981981754303\n", - "Surface training t=4251, loss=0.3584528863430023\n", - "Surface training t=4252, loss=0.3208587020635605\n", - "Surface training t=4253, loss=0.3483874350786209\n", - "Surface training t=4254, loss=0.3341013044118881\n", - "Surface training t=4255, loss=0.3511430025100708\n", - "Surface training t=4256, loss=0.39988963305950165\n", - "Surface training t=4257, loss=0.4129539728164673\n", - "Surface training t=4258, loss=0.3908233195543289\n", - "Surface training t=4259, loss=0.36181671917438507\n", - "Surface training t=4260, loss=0.42447084188461304\n", - "Surface training t=4261, loss=0.38360926508903503\n", - "Surface training t=4262, loss=0.4091063290834427\n", - "Surface training t=4263, loss=0.37716926634311676\n", - "Surface training t=4264, loss=0.39614975452423096\n", - "Surface training t=4265, loss=0.3753490746021271\n", - "Surface training t=4266, loss=0.42440783977508545\n", - "Surface training t=4267, loss=0.3837572932243347\n", - "Surface training t=4268, loss=0.41328078508377075\n", - "Surface training t=4269, loss=0.3799852877855301\n", - "Surface training t=4270, loss=0.37348271906375885\n", - "Surface training t=4271, loss=0.38784344494342804\n", - "Surface training t=4272, loss=0.3934313505887985\n", - 
"Surface training t=4273, loss=0.3926992565393448\n", - "Surface training t=4274, loss=0.35267187654972076\n", - "Surface training t=4275, loss=0.4444499611854553\n", - "Surface training t=4276, loss=0.36108633875846863\n", - "Surface training t=4277, loss=0.3628004491329193\n", - "Surface training t=4278, loss=0.3530699163675308\n", - "Surface training t=4279, loss=0.36622439324855804\n", - "Surface training t=4280, loss=0.38559652864933014\n", - "Surface training t=4281, loss=0.3681141883134842\n", - "Surface training t=4282, loss=0.38887767493724823\n", - "Surface training t=4283, loss=0.4231356382369995\n", - "Surface training t=4284, loss=0.39899827539920807\n", - "Surface training t=4285, loss=0.3789834678173065\n", - "Surface training t=4286, loss=0.349868506193161\n", - "Surface training t=4287, loss=0.39582115411758423\n", - "Surface training t=4288, loss=0.3507246971130371\n", - "Surface training t=4289, loss=0.37392280995845795\n", - "Surface training t=4290, loss=0.32730312645435333\n", - "Surface training t=4291, loss=0.35156576335430145\n", - "Surface training t=4292, loss=0.3771182894706726\n", - "Surface training t=4293, loss=0.42598100006580353\n", - "Surface training t=4294, loss=0.40974435210227966\n", - "Surface training t=4295, loss=0.3313213288784027\n", - "Surface training t=4296, loss=0.38774310052394867\n", - "Surface training t=4297, loss=0.3780954033136368\n", - "Surface training t=4298, loss=0.43271537125110626\n", - "Surface training t=4299, loss=0.3877466320991516\n", - "Surface training t=4300, loss=0.38695840537548065\n", - "Surface training t=4301, loss=0.35753315687179565\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=4302, loss=0.37416818737983704\n", - "Surface training t=4303, loss=0.38134732842445374\n", - "Surface training t=4304, loss=0.3781530112028122\n", - "Surface training t=4305, loss=0.3643421232700348\n", - "Surface training t=4306, loss=0.3680584281682968\n", - "Surface training t=4307, loss=0.3986388146877289\n", - "Surface training t=4308, loss=0.38962027430534363\n", - "Surface training t=4309, loss=0.3907150626182556\n", - "Surface training t=4310, loss=0.3817341476678848\n", - "Surface training t=4311, loss=0.3531882166862488\n", - "Surface training t=4312, loss=0.3760717958211899\n", - "Surface training t=4313, loss=0.40734662115573883\n", - "Surface training t=4314, loss=0.3628798723220825\n", - "Surface training t=4315, loss=0.4091508239507675\n", - "Surface training t=4316, loss=0.34178614616394043\n", - "Surface training t=4317, loss=0.40241488814353943\n", - "Surface training t=4318, loss=0.398468941450119\n", - "Surface training t=4319, loss=0.36917728185653687\n", - "Surface training t=4320, loss=0.3768483102321625\n", - "Surface training t=4321, loss=0.4205663800239563\n", - "Surface training t=4322, loss=0.3845602869987488\n", - "Surface training t=4323, loss=0.3772782236337662\n", - "Surface training t=4324, loss=0.3747744709253311\n", - "Surface training t=4325, loss=0.39269959926605225\n", - "Surface training t=4326, loss=0.45843103528022766\n", - "Surface training t=4327, loss=0.3879317045211792\n", - "Surface training t=4328, loss=0.3375786393880844\n", - "Surface training t=4329, loss=0.41481436789035797\n", - "Surface training t=4330, loss=0.3537273108959198\n", - "Surface training t=4331, loss=0.3769138753414154\n", - "Surface training t=4332, loss=0.4170885384082794\n", - "Surface training t=4333, loss=0.4118906408548355\n", - "Surface training t=4334, 
loss=0.42412762343883514\n", - "Surface training t=4335, loss=0.3857725113630295\n", - "Surface training t=4336, loss=0.3564590960741043\n", - "Surface training t=4337, loss=0.35500285029411316\n", - "Surface training t=4338, loss=0.34384578466415405\n", - "Surface training t=4339, loss=0.3884899169206619\n", - "Surface training t=4340, loss=0.4181213974952698\n", - "Surface training t=4341, loss=0.3181734159588814\n", - "Surface training t=4342, loss=0.42288558185100555\n", - "Surface training t=4343, loss=0.34617947041988373\n", - "Surface training t=4344, loss=0.4405951499938965\n", - "Surface training t=4345, loss=0.31235460937023163\n", - "Surface training t=4346, loss=0.4012983441352844\n", - "Surface training t=4347, loss=0.3937307894229889\n", - "Surface training t=4348, loss=0.3397320806980133\n", - "Surface training t=4349, loss=0.31667403876781464\n", - "Surface training t=4350, loss=0.39592914283275604\n", - "Surface training t=4351, loss=0.3545546233654022\n", - "Surface training t=4352, loss=0.37972722947597504\n", - "Surface training t=4353, loss=0.39214687049388885\n", - "Surface training t=4354, loss=0.3416304439306259\n", - "Surface training t=4355, loss=0.37028568983078003\n", - "Surface training t=4356, loss=0.3455803543329239\n", - "Surface training t=4357, loss=0.34553787112236023\n", - "Surface training t=4358, loss=0.33478058874607086\n", - "Surface training t=4359, loss=0.372174471616745\n", - "Surface training t=4360, loss=0.34994155168533325\n", - "Surface training t=4361, loss=0.3253207355737686\n", - "Surface training t=4362, loss=0.37915845215320587\n", - "Surface training t=4363, loss=0.4169373959302902\n", - "Surface training t=4364, loss=0.3432408422231674\n", - "Surface training t=4365, loss=0.36017055809497833\n", - "Surface training t=4366, loss=0.37177030742168427\n", - "Surface training t=4367, loss=0.39301586151123047\n", - "Surface training t=4368, loss=0.34541475772857666\n", - "Surface training t=4369, loss=0.35473762452602386\n", - "Surface training t=4370, loss=0.4227055013179779\n", - "Surface training t=4371, loss=0.4023296535015106\n", - "Surface training t=4372, loss=0.4003516286611557\n", - "Surface training t=4373, loss=0.39285245537757874\n", - "Surface training t=4374, loss=0.3319564759731293\n", - "Surface training t=4375, loss=0.4001583158969879\n", - "Surface training t=4376, loss=0.4283875524997711\n", - "Surface training t=4377, loss=0.38392408192157745\n", - "Surface training t=4378, loss=0.36841341853141785\n", - "Surface training t=4379, loss=0.3593503534793854\n", - "Surface training t=4380, loss=0.3542510122060776\n", - "Surface training t=4381, loss=0.3701048195362091\n", - "Surface training t=4382, loss=0.3547619432210922\n", - "Surface training t=4383, loss=0.40310579538345337\n", - "Surface training t=4384, loss=0.3672349154949188\n", - "Surface training t=4385, loss=0.3739467263221741\n", - "Surface training t=4386, loss=0.36691808700561523\n", - "Surface training t=4387, loss=0.35604386031627655\n", - "Surface training t=4388, loss=0.3479558527469635\n", - "Surface training t=4389, loss=0.37733401358127594\n", - "Surface training t=4390, loss=0.3711944967508316\n", - "Surface training t=4391, loss=0.34421688318252563\n", - "Surface training t=4392, loss=0.4317270368337631\n", - "Surface training t=4393, loss=0.36792542040348053\n", - "Surface training t=4394, loss=0.31951098144054413\n", - "Surface training t=4395, loss=0.36033694446086884\n", - "Surface training t=4396, loss=0.379390224814415\n", - "Surface training 
t=4397, loss=0.3505350947380066\n", - "Surface training t=4398, loss=0.3869727849960327\n", - "Surface training t=4399, loss=0.36493639647960663\n", - "Surface training t=4400, loss=0.3528081029653549\n", - "Surface training t=4401, loss=0.34022682905197144\n", - "Surface training t=4402, loss=0.35573072731494904\n", - "Surface training t=4403, loss=0.3499225527048111\n", - "Surface training t=4404, loss=0.35486382246017456\n", - "Surface training t=4405, loss=0.36691416800022125\n", - "Surface training t=4406, loss=0.36063696444034576\n", - "Surface training t=4407, loss=0.34796419739723206\n", - "Surface training t=4408, loss=0.402608722448349\n", - "Surface training t=4409, loss=0.371611624956131\n", - "Surface training t=4410, loss=0.3657953590154648\n", - "Surface training t=4411, loss=0.3909660875797272\n", - "Surface training t=4412, loss=0.4032485634088516\n", - "Surface training t=4413, loss=0.37316152453422546\n", - "Surface training t=4414, loss=0.3520517945289612\n", - "Surface training t=4415, loss=0.38107921183109283\n", - "Surface training t=4416, loss=0.36524389684200287\n", - "Surface training t=4417, loss=0.3681595027446747\n", - "Surface training t=4418, loss=0.35095900297164917\n", - "Surface training t=4419, loss=0.3640449047088623\n", - "Surface training t=4420, loss=0.4061862975358963\n", - "Surface training t=4421, loss=0.37502218782901764\n", - "Surface training t=4422, loss=0.4401422441005707\n", - "Surface training t=4423, loss=0.3404920846223831\n", - "Surface training t=4424, loss=0.36102883517742157\n", - "Surface training t=4425, loss=0.3995009660720825\n", - "Surface training t=4426, loss=0.33152784407138824\n", - "Surface training t=4427, loss=0.33734703063964844\n", - "Surface training t=4428, loss=0.32043422758579254\n", - "Surface training t=4429, loss=0.38645413517951965\n", - "Surface training t=4430, loss=0.382725328207016\n", - "Surface training t=4431, loss=0.382860004901886\n", - "Surface training t=4432, loss=0.3888523578643799\n", - "Surface training t=4433, loss=0.36120568215847015\n", - "Surface training t=4434, loss=0.3106190487742424\n", - "Surface training t=4435, loss=0.3742678016424179\n", - "Surface training t=4436, loss=0.3242734670639038\n", - "Surface training t=4437, loss=0.3148689717054367\n", - "Surface training t=4438, loss=0.42700889706611633\n", - "Surface training t=4439, loss=0.37403063476085663\n", - "Surface training t=4440, loss=0.3247043117880821\n", - "Surface training t=4441, loss=0.3767058104276657\n", - "Surface training t=4442, loss=0.3977982699871063\n", - "Surface training t=4443, loss=0.3738497793674469\n", - "Surface training t=4444, loss=0.3952365815639496\n", - "Surface training t=4445, loss=0.40496671199798584\n", - "Surface training t=4446, loss=0.36646367609500885\n", - "Surface training t=4447, loss=0.38503114879131317\n", - "Surface training t=4448, loss=0.3294638693332672\n", - "Surface training t=4449, loss=0.35898570716381073\n", - "Surface training t=4450, loss=0.3792078197002411\n", - "Surface training t=4451, loss=0.37381313741207123\n", - "Surface training t=4452, loss=0.34670698642730713\n", - "Surface training t=4453, loss=0.3379925638437271\n", - "Surface training t=4454, loss=0.312766969203949\n", - "Surface training t=4455, loss=0.3246275782585144\n", - "Surface training t=4456, loss=0.3296464830636978\n", - "Surface training t=4457, loss=0.31421442329883575\n", - "Surface training t=4458, loss=0.38798047602176666\n", - "Surface training t=4459, loss=0.35983774065971375\n", - "Surface training 
t=4460, loss=0.33491238951683044\n", - "Surface training t=4461, loss=0.3415924608707428\n", - "Surface training t=4462, loss=0.35318075120449066\n", - "Surface training t=4463, loss=0.3805788457393646\n", - "Surface training t=4464, loss=0.3435753434896469\n", - "Surface training t=4465, loss=0.3380352556705475\n", - "Surface training t=4466, loss=0.3538895696401596\n", - "Surface training t=4467, loss=0.3636568784713745\n", - "Surface training t=4468, loss=0.432081863284111\n", - "Surface training t=4469, loss=0.391595795750618\n", - "Surface training t=4470, loss=0.3943723738193512\n", - "Surface training t=4471, loss=0.3214152157306671\n", - "Surface training t=4472, loss=0.30232080072164536\n", - "Surface training t=4473, loss=0.33674293756484985\n", - "Surface training t=4474, loss=0.36456628143787384\n", - "Surface training t=4475, loss=0.35045158863067627\n", - "Surface training t=4476, loss=0.3202579393982887\n", - "Surface training t=4477, loss=0.3390187472105026\n", - "Surface training t=4478, loss=0.36981113255023956\n", - "Surface training t=4479, loss=0.4193192571401596\n", - "Surface training t=4480, loss=0.4346388876438141\n", - "Surface training t=4481, loss=0.38033419847488403\n", - "Surface training t=4482, loss=0.3702719956636429\n", - "Surface training t=4483, loss=0.36689913272857666\n", - "Surface training t=4484, loss=0.35413530468940735\n", - "Surface training t=4485, loss=0.3101692497730255\n", - "Surface training t=4486, loss=0.3777613341808319\n", - "Surface training t=4487, loss=0.3818369656801224\n", - "Surface training t=4488, loss=0.37624460458755493\n", - "Surface training t=4489, loss=0.3786255419254303\n", - "Surface training t=4490, loss=0.35722778737545013\n", - "Surface training t=4491, loss=0.3468325436115265\n", - "Surface training t=4492, loss=0.41311104595661163\n", - "Surface training t=4493, loss=0.36761048436164856\n", - "Surface training t=4494, loss=0.33568279445171356\n", - "Surface training t=4495, loss=0.35242754220962524\n", - "Surface training t=4496, loss=0.38079413771629333\n", - "Surface training t=4497, loss=0.3614285886287689\n", - "Surface training t=4498, loss=0.36679908633232117\n", - "Surface training t=4499, loss=0.3454326391220093\n", - "Surface training t=4500, loss=0.34662647545337677\n", - "Surface training t=4501, loss=0.3883886933326721\n", - "Surface training t=4502, loss=0.3286457061767578\n", - "Surface training t=4503, loss=0.3812432587146759\n", - "Surface training t=4504, loss=0.363389328122139\n", - "Surface training t=4505, loss=0.30353666096925735\n", - "Surface training t=4506, loss=0.36332517862319946\n", - "Surface training t=4507, loss=0.32256707549095154\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=4508, loss=0.35294489562511444\n", - "Surface training t=4509, loss=0.30525777488946915\n", - "Surface training t=4510, loss=0.39752982556819916\n", - "Surface training t=4511, loss=0.34861883521080017\n", - "Surface training t=4512, loss=0.39940160512924194\n", - "Surface training t=4513, loss=0.336132749915123\n", - "Surface training t=4514, loss=0.36138124763965607\n", - "Surface training t=4515, loss=0.3972701132297516\n", - "Surface training t=4516, loss=0.32293180376291275\n", - "Surface training t=4517, loss=0.3962497413158417\n", - "Surface training t=4518, loss=0.382998526096344\n", - "Surface training t=4519, loss=0.3656099736690521\n", - "Surface training t=4520, loss=0.33271750807762146\n", - "Surface training t=4521, loss=0.3487425446510315\n", - 
"Surface training t=4522, loss=0.3864799439907074\n", - "Surface training t=4523, loss=0.35701726377010345\n", - "Surface training t=4524, loss=0.32848595082759857\n", - "Surface training t=4525, loss=0.3744707256555557\n", - "Surface training t=4526, loss=0.34485067427158356\n", - "Surface training t=4527, loss=0.39946161210536957\n", - "Surface training t=4528, loss=0.3228444755077362\n", - "Surface training t=4529, loss=0.4011625051498413\n", - "Surface training t=4530, loss=0.3238094300031662\n", - "Surface training t=4531, loss=0.35599151253700256\n", - "Surface training t=4532, loss=0.3940228223800659\n", - "Surface training t=4533, loss=0.39141616225242615\n", - "Surface training t=4534, loss=0.3737682104110718\n", - "Surface training t=4535, loss=0.37499624490737915\n", - "Surface training t=4536, loss=0.35205142199993134\n", - "Surface training t=4537, loss=0.363216832280159\n", - "Surface training t=4538, loss=0.35783466696739197\n", - "Surface training t=4539, loss=0.3443835824728012\n", - "Surface training t=4540, loss=0.3538028299808502\n", - "Surface training t=4541, loss=0.3464362770318985\n", - "Surface training t=4542, loss=0.3576887547969818\n", - "Surface training t=4543, loss=0.378839835524559\n", - "Surface training t=4544, loss=0.3379286676645279\n", - "Surface training t=4545, loss=0.37465597689151764\n", - "Surface training t=4546, loss=0.36155933141708374\n", - "Surface training t=4547, loss=0.3763217329978943\n", - "Surface training t=4548, loss=0.38714568316936493\n", - "Surface training t=4549, loss=0.40397776663303375\n", - "Surface training t=4550, loss=0.3954504281282425\n", - "Surface training t=4551, loss=0.37967929244041443\n", - "Surface training t=4552, loss=0.3590090870857239\n", - "Surface training t=4553, loss=0.3781857043504715\n", - "Surface training t=4554, loss=0.34687648713588715\n", - "Surface training t=4555, loss=0.358994722366333\n", - "Surface training t=4556, loss=0.3967878520488739\n", - "Surface training t=4557, loss=0.3773282468318939\n", - "Surface training t=4558, loss=0.38107360899448395\n", - "Surface training t=4559, loss=0.3196386396884918\n", - "Surface training t=4560, loss=0.34819504618644714\n", - "Surface training t=4561, loss=0.34106819331645966\n", - "Surface training t=4562, loss=0.3601728677749634\n", - "Surface training t=4563, loss=0.30577630549669266\n", - "Surface training t=4564, loss=0.321598619222641\n", - "Surface training t=4565, loss=0.3781317472457886\n", - "Surface training t=4566, loss=0.3458957076072693\n", - "Surface training t=4567, loss=0.316767081618309\n", - "Surface training t=4568, loss=0.3774903416633606\n", - "Surface training t=4569, loss=0.39020244777202606\n", - "Surface training t=4570, loss=0.3187175840139389\n", - "Surface training t=4571, loss=0.3674771934747696\n", - "Surface training t=4572, loss=0.3799215853214264\n", - "Surface training t=4573, loss=0.4054121673107147\n", - "Surface training t=4574, loss=0.3902060091495514\n", - "Surface training t=4575, loss=0.3830926716327667\n", - "Surface training t=4576, loss=0.38483820855617523\n", - "Surface training t=4577, loss=0.3915509134531021\n", - "Surface training t=4578, loss=0.3649221658706665\n", - "Surface training t=4579, loss=0.36073482036590576\n", - "Surface training t=4580, loss=0.4328763484954834\n", - "Surface training t=4581, loss=0.41254378855228424\n", - "Surface training t=4582, loss=0.478069007396698\n", - "Surface training t=4583, loss=0.39864878356456757\n", - "Surface training t=4584, loss=0.40513965487480164\n", - 
"Surface training t=4585, loss=0.40900513529777527\n", - "Surface training t=4586, loss=0.4422040432691574\n", - "Surface training t=4587, loss=0.3658248484134674\n", - "Surface training t=4588, loss=0.41193078458309174\n", - "Surface training t=4589, loss=0.36952202022075653\n", - "Surface training t=4590, loss=0.37535589933395386\n", - "Surface training t=4591, loss=0.35220447182655334\n", - "Surface training t=4592, loss=0.43900297582149506\n", - "Surface training t=4593, loss=0.4089733362197876\n", - "Surface training t=4594, loss=0.3889257460832596\n", - "Surface training t=4595, loss=0.38490912318229675\n", - "Surface training t=4596, loss=0.3716210424900055\n", - "Surface training t=4597, loss=0.3683793395757675\n", - "Surface training t=4598, loss=0.39986535906791687\n", - "Surface training t=4599, loss=0.359115332365036\n", - "Surface training t=4600, loss=0.37003548443317413\n", - "Surface training t=4601, loss=0.3796766698360443\n", - "Surface training t=4602, loss=0.36375147104263306\n", - "Surface training t=4603, loss=0.3631678372621536\n", - "Surface training t=4604, loss=0.3583451956510544\n", - "Surface training t=4605, loss=0.39910224080085754\n", - "Surface training t=4606, loss=0.3694910407066345\n", - "Surface training t=4607, loss=0.39311932027339935\n", - "Surface training t=4608, loss=0.4231205880641937\n", - "Surface training t=4609, loss=0.3708811402320862\n", - "Surface training t=4610, loss=0.3357953578233719\n", - "Surface training t=4611, loss=0.393618568778038\n", - "Surface training t=4612, loss=0.3344514071941376\n", - "Surface training t=4613, loss=0.3717297166585922\n", - "Surface training t=4614, loss=0.39689546823501587\n", - "Surface training t=4615, loss=0.3530116677284241\n", - "Surface training t=4616, loss=0.3464813828468323\n", - "Surface training t=4617, loss=0.3354956805706024\n", - "Surface training t=4618, loss=0.33438166975975037\n", - "Surface training t=4619, loss=0.3424687385559082\n", - "Surface training t=4620, loss=0.3704434931278229\n", - "Surface training t=4621, loss=0.33510713279247284\n", - "Surface training t=4622, loss=0.3415919840335846\n", - "Surface training t=4623, loss=0.3551909625530243\n", - "Surface training t=4624, loss=0.3573664426803589\n", - "Surface training t=4625, loss=0.34049396216869354\n", - "Surface training t=4626, loss=0.36195501685142517\n", - "Surface training t=4627, loss=0.3181038051843643\n", - "Surface training t=4628, loss=0.32798804342746735\n", - "Surface training t=4629, loss=0.3217407763004303\n", - "Surface training t=4630, loss=0.3853113204240799\n", - "Surface training t=4631, loss=0.35586076974868774\n", - "Surface training t=4632, loss=0.34820815920829773\n", - "Surface training t=4633, loss=0.3677218407392502\n", - "Surface training t=4634, loss=0.3393997848033905\n", - "Surface training t=4635, loss=0.42260393500328064\n", - "Surface training t=4636, loss=0.38695214688777924\n", - "Surface training t=4637, loss=0.3682563900947571\n", - "Surface training t=4638, loss=0.4041961133480072\n", - "Surface training t=4639, loss=0.38951657712459564\n", - "Surface training t=4640, loss=0.33623258769512177\n", - "Surface training t=4641, loss=0.3647107630968094\n", - "Surface training t=4642, loss=0.42259393632411957\n", - "Surface training t=4643, loss=0.32884112000465393\n", - "Surface training t=4644, loss=0.36407576501369476\n", - "Surface training t=4645, loss=0.3878577649593353\n", - "Surface training t=4646, loss=0.4463985115289688\n", - "Surface training t=4647, loss=0.38334959745407104\n", - 
"Surface training t=4648, loss=0.41183583438396454\n", - "Surface training t=4649, loss=0.4305856078863144\n", - "Surface training t=4650, loss=0.4443975239992142\n", - "Surface training t=4651, loss=0.34607939422130585\n", - "Surface training t=4652, loss=0.4207507073879242\n", - "Surface training t=4653, loss=0.3691413551568985\n", - "Surface training t=4654, loss=0.3824267238378525\n", - "Surface training t=4655, loss=0.3916876018047333\n", - "Surface training t=4656, loss=0.4695362448692322\n", - "Surface training t=4657, loss=0.40651577711105347\n", - "Surface training t=4658, loss=0.3876422345638275\n", - "Surface training t=4659, loss=0.34728407859802246\n", - "Surface training t=4660, loss=0.4125414937734604\n", - "Surface training t=4661, loss=0.40472573041915894\n", - "Surface training t=4662, loss=0.40897008776664734\n", - "Surface training t=4663, loss=0.3781917542219162\n", - "Surface training t=4664, loss=0.4093916565179825\n", - "Surface training t=4665, loss=0.3657839447259903\n", - "Surface training t=4666, loss=0.37420833110809326\n", - "Surface training t=4667, loss=0.39356274902820587\n", - "Surface training t=4668, loss=0.37300658226013184\n", - "Surface training t=4669, loss=0.3980589807033539\n", - "Surface training t=4670, loss=0.3215583711862564\n", - "Surface training t=4671, loss=0.3589416444301605\n", - "Surface training t=4672, loss=0.3586617708206177\n", - "Surface training t=4673, loss=0.35255520045757294\n", - "Surface training t=4674, loss=0.4310198277235031\n", - "Surface training t=4675, loss=0.3296675533056259\n", - "Surface training t=4676, loss=0.3538426607847214\n", - "Surface training t=4677, loss=0.3321065902709961\n", - "Surface training t=4678, loss=0.34450915455818176\n", - "Surface training t=4679, loss=0.34496834874153137\n", - "Surface training t=4680, loss=0.3844732195138931\n", - "Surface training t=4681, loss=0.316301167011261\n", - "Surface training t=4682, loss=0.30801568925380707\n", - "Surface training t=4683, loss=0.35344570875167847\n", - "Surface training t=4684, loss=0.387448325753212\n", - "Surface training t=4685, loss=0.38780422508716583\n", - "Surface training t=4686, loss=0.35513846576213837\n", - "Surface training t=4687, loss=0.3616010546684265\n", - "Surface training t=4688, loss=0.36283090710639954\n", - "Surface training t=4689, loss=0.34629860520362854\n", - "Surface training t=4690, loss=0.3755059391260147\n", - "Surface training t=4691, loss=0.3642145097255707\n", - "Surface training t=4692, loss=0.3723543733358383\n", - "Surface training t=4693, loss=0.35281139612197876\n", - "Surface training t=4694, loss=0.3390592336654663\n", - "Surface training t=4695, loss=0.35113413631916046\n", - "Surface training t=4696, loss=0.3374287039041519\n", - "Surface training t=4697, loss=0.3301214128732681\n", - "Surface training t=4698, loss=0.386724591255188\n", - "Surface training t=4699, loss=0.3799232542514801\n", - "Surface training t=4700, loss=0.3142611160874367\n", - "Surface training t=4701, loss=0.3914482593536377\n", - "Surface training t=4702, loss=0.30886702239513397\n", - "Surface training t=4703, loss=0.3320941925048828\n", - "Surface training t=4704, loss=0.33234554529190063\n", - "Surface training t=4705, loss=0.34983108937740326\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=4706, loss=0.36817994713783264\n", - "Surface training t=4707, loss=0.2991291359066963\n", - "Surface training t=4708, loss=0.36672520637512207\n", - "Surface training t=4709, 
loss=0.34427401423454285\n", - "Surface training t=4710, loss=0.36321890354156494\n", - "Surface training t=4711, loss=0.35722239315509796\n", - "Surface training t=4712, loss=0.3541698157787323\n", - "Surface training t=4713, loss=0.33795951306819916\n", - "Surface training t=4714, loss=0.32648633420467377\n", - "Surface training t=4715, loss=0.35048262774944305\n", - "Surface training t=4716, loss=0.36070995032787323\n", - "Surface training t=4717, loss=0.32955893874168396\n", - "Surface training t=4718, loss=0.3447103053331375\n", - "Surface training t=4719, loss=0.38561876118183136\n", - "Surface training t=4720, loss=0.32803651690483093\n", - "Surface training t=4721, loss=0.31871648132801056\n", - "Surface training t=4722, loss=0.35296526551246643\n", - "Surface training t=4723, loss=0.35386501252651215\n", - "Surface training t=4724, loss=0.34947003424167633\n", - "Surface training t=4725, loss=0.3464714288711548\n", - "Surface training t=4726, loss=0.3579832464456558\n", - "Surface training t=4727, loss=0.3185901641845703\n", - "Surface training t=4728, loss=0.31035933643579483\n", - "Surface training t=4729, loss=0.3371572941541672\n", - "Surface training t=4730, loss=0.32867611944675446\n", - "Surface training t=4731, loss=0.3622114360332489\n", - "Surface training t=4732, loss=0.3976222574710846\n", - "Surface training t=4733, loss=0.3607126921415329\n", - "Surface training t=4734, loss=0.3043099045753479\n", - "Surface training t=4735, loss=0.38926348090171814\n", - "Surface training t=4736, loss=0.3564392179250717\n", - "Surface training t=4737, loss=0.38054780662059784\n", - "Surface training t=4738, loss=0.35158489644527435\n", - "Surface training t=4739, loss=0.3089229464530945\n", - "Surface training t=4740, loss=0.3305133134126663\n", - "Surface training t=4741, loss=0.30790266394615173\n", - "Surface training t=4742, loss=0.33724159002304077\n", - "Surface training t=4743, loss=0.27343565225601196\n", - "Surface training t=4744, loss=0.3433024138212204\n", - "Surface training t=4745, loss=0.33814558386802673\n", - "Surface training t=4746, loss=0.32547444105148315\n", - "Surface training t=4747, loss=0.33887919783592224\n", - "Surface training t=4748, loss=0.3562484085559845\n", - "Surface training t=4749, loss=0.35663099586963654\n", - "Surface training t=4750, loss=0.3877490758895874\n", - "Surface training t=4751, loss=0.3305843025445938\n", - "Surface training t=4752, loss=0.3657403290271759\n", - "Surface training t=4753, loss=0.3271320313215256\n", - "Surface training t=4754, loss=0.36118726432323456\n", - "Surface training t=4755, loss=0.35845886170864105\n", - "Surface training t=4756, loss=0.31533050537109375\n", - "Surface training t=4757, loss=0.3080063760280609\n", - "Surface training t=4758, loss=0.3530910462141037\n", - "Surface training t=4759, loss=0.35311993956565857\n", - "Surface training t=4760, loss=0.3455332964658737\n", - "Surface training t=4761, loss=0.3651703745126724\n", - "Surface training t=4762, loss=0.36628711223602295\n", - "Surface training t=4763, loss=0.38244879245758057\n", - "Surface training t=4764, loss=0.37099991738796234\n", - "Surface training t=4765, loss=0.3709573298692703\n", - "Surface training t=4766, loss=0.3245709538459778\n", - "Surface training t=4767, loss=0.319130040705204\n", - "Surface training t=4768, loss=0.3583488464355469\n", - "Surface training t=4769, loss=0.3501458019018173\n", - "Surface training t=4770, loss=0.39026954770088196\n", - "Surface training t=4771, loss=0.33193716406822205\n", - "Surface training 
t=4772, loss=0.39734509587287903\n", - "Surface training t=4773, loss=0.34866711497306824\n", - "Surface training t=4774, loss=0.36379337310791016\n", - "Surface training t=4775, loss=0.3791978359222412\n", - "Surface training t=4776, loss=0.36265163123607635\n", - "Surface training t=4777, loss=0.3507082909345627\n", - "Surface training t=4778, loss=0.3862844556570053\n", - "Surface training t=4779, loss=0.3683246374130249\n", - "Surface training t=4780, loss=0.4104674607515335\n", - "Surface training t=4781, loss=0.4076852351427078\n", - "Surface training t=4782, loss=0.3420954793691635\n", - "Surface training t=4783, loss=0.4049474596977234\n", - "Surface training t=4784, loss=0.3591865450143814\n", - "Surface training t=4785, loss=0.3494352549314499\n", - "Surface training t=4786, loss=0.35347697138786316\n", - "Surface training t=4787, loss=0.3600555956363678\n", - "Surface training t=4788, loss=0.38043949007987976\n", - "Surface training t=4789, loss=0.31629742681980133\n", - "Surface training t=4790, loss=0.36473606526851654\n", - "Surface training t=4791, loss=0.34667447209358215\n", - "Surface training t=4792, loss=0.35855329036712646\n", - "Surface training t=4793, loss=0.33096690475940704\n", - "Surface training t=4794, loss=0.31840746104717255\n", - "Surface training t=4795, loss=0.36668989062309265\n", - "Surface training t=4796, loss=0.34504373371601105\n", - "Surface training t=4797, loss=0.34357741475105286\n", - "Surface training t=4798, loss=0.33180396258831024\n", - "Surface training t=4799, loss=0.3370264321565628\n", - "Surface training t=4800, loss=0.3651564419269562\n", - "Surface training t=4801, loss=0.31795254349708557\n", - "Surface training t=4802, loss=0.3411529213190079\n", - "Surface training t=4803, loss=0.32686223089694977\n", - "Surface training t=4804, loss=0.34000976383686066\n", - "Surface training t=4805, loss=0.3492043763399124\n", - "Surface training t=4806, loss=0.34844666719436646\n", - "Surface training t=4807, loss=0.34666359424591064\n", - "Surface training t=4808, loss=0.40211793780326843\n", - "Surface training t=4809, loss=0.3853513300418854\n", - "Surface training t=4810, loss=0.3136245310306549\n", - "Surface training t=4811, loss=0.34616684913635254\n", - "Surface training t=4812, loss=0.3297291547060013\n", - "Surface training t=4813, loss=0.4100306034088135\n", - "Surface training t=4814, loss=0.42301589250564575\n", - "Surface training t=4815, loss=0.36196161806583405\n", - "Surface training t=4816, loss=0.36324451863765717\n", - "Surface training t=4817, loss=0.36012423038482666\n", - "Surface training t=4818, loss=0.3938414752483368\n", - "Surface training t=4819, loss=0.3602464199066162\n", - "Surface training t=4820, loss=0.35456839203834534\n", - "Surface training t=4821, loss=0.3768157958984375\n", - "Surface training t=4822, loss=0.3227842301130295\n", - "Surface training t=4823, loss=0.31772269308567047\n", - "Surface training t=4824, loss=0.3012867271900177\n", - "Surface training t=4825, loss=0.3309038281440735\n", - "Surface training t=4826, loss=0.32625678181648254\n", - "Surface training t=4827, loss=0.33914417028427124\n", - "Surface training t=4828, loss=0.35071131587028503\n", - "Surface training t=4829, loss=0.3634670525789261\n", - "Surface training t=4830, loss=0.337959423661232\n", - "Surface training t=4831, loss=0.3566763699054718\n", - "Surface training t=4832, loss=0.3357463628053665\n", - "Surface training t=4833, loss=0.3245605081319809\n", - "Surface training t=4834, loss=0.2923673912882805\n", - "Surface 
training t=4835, loss=0.3655222952365875\n", - "Surface training t=4836, loss=0.3141457885503769\n", - "Surface training t=4837, loss=0.36085692048072815\n", - "Surface training t=4838, loss=0.3410634994506836\n", - "Surface training t=4839, loss=0.29969698190689087\n", - "Surface training t=4840, loss=0.32821793854236603\n", - "Surface training t=4841, loss=0.29426806420087814\n", - "Surface training t=4842, loss=0.3308296352624893\n", - "Surface training t=4843, loss=0.3346896171569824\n", - "Surface training t=4844, loss=0.3553932309150696\n", - "Surface training t=4845, loss=0.357504740357399\n", - "Surface training t=4846, loss=0.3548886328935623\n", - "Surface training t=4847, loss=0.3367583006620407\n", - "Surface training t=4848, loss=0.3784773647785187\n", - "Surface training t=4849, loss=0.34593960642814636\n", - "Surface training t=4850, loss=0.3441551774740219\n", - "Surface training t=4851, loss=0.3018244132399559\n", - "Surface training t=4852, loss=0.3421979546546936\n", - "Surface training t=4853, loss=0.33538977801799774\n", - "Surface training t=4854, loss=0.3206251561641693\n", - "Surface training t=4855, loss=0.29176507890224457\n", - "Surface training t=4856, loss=0.3301401287317276\n", - "Surface training t=4857, loss=0.33803005516529083\n", - "Surface training t=4858, loss=0.31050562858581543\n", - "Surface training t=4859, loss=0.30931776762008667\n", - "Surface training t=4860, loss=0.3420119434595108\n", - "Surface training t=4861, loss=0.33066271245479584\n", - "Surface training t=4862, loss=0.32381659746170044\n", - "Surface training t=4863, loss=0.3511497229337692\n", - "Surface training t=4864, loss=0.3257976323366165\n", - "Surface training t=4865, loss=0.30340299010276794\n", - "Surface training t=4866, loss=0.36182714998722076\n", - "Surface training t=4867, loss=0.33455997705459595\n", - "Surface training t=4868, loss=0.3825587183237076\n", - "Surface training t=4869, loss=0.33208900690078735\n", - "Surface training t=4870, loss=0.29730407893657684\n", - "Surface training t=4871, loss=0.30391183495521545\n", - "Surface training t=4872, loss=0.34452909231185913\n", - "Surface training t=4873, loss=0.31840674579143524\n", - "Surface training t=4874, loss=0.31174176931381226\n", - "Surface training t=4875, loss=0.3311067968606949\n", - "Surface training t=4876, loss=0.29250627756118774\n", - "Surface training t=4877, loss=0.3374555706977844\n", - "Surface training t=4878, loss=0.3070048391819\n", - "Surface training t=4879, loss=0.3393526077270508\n", - "Surface training t=4880, loss=0.3889262527227402\n", - "Surface training t=4881, loss=0.40356874465942383\n", - "Surface training t=4882, loss=0.28304241597652435\n", - "Surface training t=4883, loss=0.30312979966402054\n", - "Surface training t=4884, loss=0.325619101524353\n", - "Surface training t=4885, loss=0.315571665763855\n", - "Surface training t=4886, loss=0.33428333699703217\n", - "Surface training t=4887, loss=0.34479811787605286\n", - "Surface training t=4888, loss=0.32093317806720734\n", - "Surface training t=4889, loss=0.3607652336359024\n", - "Surface training t=4890, loss=0.3048575446009636\n", - "Surface training t=4891, loss=0.32494497299194336\n", - "Surface training t=4892, loss=0.35316579043865204\n", - "Surface training t=4893, loss=0.35572807490825653\n", - "Surface training t=4894, loss=0.3339609205722809\n", - "Surface training t=4895, loss=0.35076984763145447\n", - "Surface training t=4896, loss=0.3858302980661392\n", - "Surface training t=4897, loss=0.26687951385974884\n", - "Surface 
training t=4898, loss=0.33569417893886566\n", - "Surface training t=4899, loss=0.31369221210479736\n", - "Surface training t=4900, loss=0.3195297569036484\n", - "Surface training t=4901, loss=0.3004414141178131\n", - "Surface training t=4902, loss=0.33199629187583923\n", - "Surface training t=4903, loss=0.34944286942481995\n", - "Surface training t=4904, loss=0.33551979064941406\n", - "Surface training t=4905, loss=0.30169762670993805\n", - "Surface training t=4906, loss=0.36345718801021576\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=4907, loss=0.3308414816856384\n", - "Surface training t=4908, loss=0.31015458703041077\n", - "Surface training t=4909, loss=0.30463728308677673\n", - "Surface training t=4910, loss=0.32008279860019684\n", - "Surface training t=4911, loss=0.2951040714979172\n", - "Surface training t=4912, loss=0.33702829480171204\n", - "Surface training t=4913, loss=0.35228487849235535\n", - "Surface training t=4914, loss=0.3253467530012131\n", - "Surface training t=4915, loss=0.31549473106861115\n", - "Surface training t=4916, loss=0.3241606503725052\n", - "Surface training t=4917, loss=0.3779056668281555\n", - "Surface training t=4918, loss=0.3728039711713791\n", - "Surface training t=4919, loss=0.3220500349998474\n", - "Surface training t=4920, loss=0.30301307141780853\n", - "Surface training t=4921, loss=0.340562641620636\n", - "Surface training t=4922, loss=0.3344928175210953\n", - "Surface training t=4923, loss=0.32043473422527313\n", - "Surface training t=4924, loss=0.34644582867622375\n", - "Surface training t=4925, loss=0.3452429920434952\n", - "Surface training t=4926, loss=0.3113253712654114\n", - "Surface training t=4927, loss=0.3246746212244034\n", - "Surface training t=4928, loss=0.3507146090269089\n", - "Surface training t=4929, loss=0.33540084958076477\n", - "Surface training t=4930, loss=0.3244020491838455\n", - "Surface training t=4931, loss=0.3661618083715439\n", - "Surface training t=4932, loss=0.34582093358039856\n", - "Surface training t=4933, loss=0.33185313642024994\n", - "Surface training t=4934, loss=0.30788564682006836\n", - "Surface training t=4935, loss=0.36575618386268616\n", - "Surface training t=4936, loss=0.3551960587501526\n", - "Surface training t=4937, loss=0.3737960606813431\n", - "Surface training t=4938, loss=0.3404138684272766\n", - "Surface training t=4939, loss=0.364915132522583\n", - "Surface training t=4940, loss=0.39846308529376984\n", - "Surface training t=4941, loss=0.38006289303302765\n", - "Surface training t=4942, loss=0.32778748869895935\n", - "Surface training t=4943, loss=0.3378603160381317\n", - "Surface training t=4944, loss=0.2962803542613983\n", - "Surface training t=4945, loss=0.3246782422065735\n", - "Surface training t=4946, loss=0.3046301156282425\n", - "Surface training t=4947, loss=0.3412448614835739\n", - "Surface training t=4948, loss=0.3029187321662903\n", - "Surface training t=4949, loss=0.29171767085790634\n", - "Surface training t=4950, loss=0.3589441478252411\n", - "Surface training t=4951, loss=0.34133104979991913\n", - "Surface training t=4952, loss=0.3940960317850113\n", - "Surface training t=4953, loss=0.36376072466373444\n", - "Surface training t=4954, loss=0.35041598975658417\n", - "Surface training t=4955, loss=0.3523437827825546\n", - "Surface training t=4956, loss=0.3721865862607956\n", - "Surface training t=4957, loss=0.32386527955532074\n", - "Surface training t=4958, loss=0.3140314370393753\n", - "Surface training t=4959, 
loss=0.28060276061296463\n", - "Surface training t=4960, loss=0.3603641539812088\n", - "Surface training t=4961, loss=0.31138940155506134\n", - "Surface training t=4962, loss=0.3171911835670471\n", - "Surface training t=4963, loss=0.3066091537475586\n", - "Surface training t=4964, loss=0.29515208303928375\n", - "Surface training t=4965, loss=0.3382185995578766\n", - "Surface training t=4966, loss=0.33538947999477386\n", - "Surface training t=4967, loss=0.28029797971248627\n", - "Surface training t=4968, loss=0.3271774649620056\n", - "Surface training t=4969, loss=0.32764557003974915\n", - "Surface training t=4970, loss=0.34162838757038116\n", - "Surface training t=4971, loss=0.3391285389661789\n", - "Surface training t=4972, loss=0.30262291431427\n", - "Surface training t=4973, loss=0.31420229375362396\n", - "Surface training t=4974, loss=0.3697166293859482\n", - "Surface training t=4975, loss=0.32632970809936523\n", - "Surface training t=4976, loss=0.3026254177093506\n", - "Surface training t=4977, loss=0.3532605767250061\n", - "Surface training t=4978, loss=0.30645887553691864\n", - "Surface training t=4979, loss=0.3437691926956177\n", - "Surface training t=4980, loss=0.3165419399738312\n", - "Surface training t=4981, loss=0.3072514683008194\n", - "Surface training t=4982, loss=0.3565034717321396\n", - "Surface training t=4983, loss=0.2978912591934204\n", - "Surface training t=4984, loss=0.3520364761352539\n", - "Surface training t=4985, loss=0.3495063930749893\n", - "Surface training t=4986, loss=0.36801673471927643\n", - "Surface training t=4987, loss=0.3428412526845932\n", - "Surface training t=4988, loss=0.3384735882282257\n", - "Surface training t=4989, loss=0.387606680393219\n", - "Surface training t=4990, loss=0.3766317814588547\n", - "Surface training t=4991, loss=0.338203027844429\n", - "Surface training t=4992, loss=0.39408624172210693\n", - "Surface training t=4993, loss=0.356275275349617\n", - "Surface training t=4994, loss=0.39629240334033966\n", - "Surface training t=4995, loss=0.3384665846824646\n", - "Surface training t=4996, loss=0.4026898890733719\n", - "Surface training t=4997, loss=0.34863682091236115\n", - "Surface training t=4998, loss=0.3563470095396042\n", - "Surface training t=4999, loss=0.4017273336648941\n", - "Surface training t=5000, loss=0.37393513321876526\n", - "Surface training t=5001, loss=0.3785364329814911\n", - "Surface training t=5002, loss=0.3732006400823593\n", - "Surface training t=5003, loss=0.3531656265258789\n", - "Surface training t=5004, loss=0.3633383959531784\n", - "Surface training t=5005, loss=0.3960649371147156\n", - "Surface training t=5006, loss=0.3287232667207718\n", - "Surface training t=5007, loss=0.35228803753852844\n", - "Surface training t=5008, loss=0.35694481432437897\n", - "Surface training t=5009, loss=0.30663490295410156\n", - "Surface training t=5010, loss=0.3340197056531906\n", - "Surface training t=5011, loss=0.317983940243721\n", - "Surface training t=5012, loss=0.32604876160621643\n", - "Surface training t=5013, loss=0.3056921064853668\n", - "Surface training t=5014, loss=0.362693190574646\n", - "Surface training t=5015, loss=0.3350290209054947\n", - "Surface training t=5016, loss=0.3392331898212433\n", - "Surface training t=5017, loss=0.4065302312374115\n", - "Surface training t=5018, loss=0.35698170959949493\n", - "Surface training t=5019, loss=0.36246106028556824\n", - "Surface training t=5020, loss=0.3966373801231384\n", - "Surface training t=5021, loss=0.3505736291408539\n", - "Surface training t=5022, 
loss=0.32250915467739105\n", - "Surface training t=5023, loss=0.3016819953918457\n", - "Surface training t=5024, loss=0.32525525987148285\n", - "Surface training t=5025, loss=0.3245408684015274\n", - "Surface training t=5026, loss=0.30935530364513397\n", - "Surface training t=5027, loss=0.3512042462825775\n", - "Surface training t=5028, loss=0.305649071931839\n", - "Surface training t=5029, loss=0.3038254678249359\n", - "Surface training t=5030, loss=0.2866813912987709\n", - "Surface training t=5031, loss=0.3126784861087799\n", - "Surface training t=5032, loss=0.33396829664707184\n", - "Surface training t=5033, loss=0.3579540550708771\n", - "Surface training t=5034, loss=0.34195244312286377\n", - "Surface training t=5035, loss=0.29794879257678986\n", - "Surface training t=5036, loss=0.3168325573205948\n", - "Surface training t=5037, loss=0.27554409950971603\n", - "Surface training t=5038, loss=0.326405867934227\n", - "Surface training t=5039, loss=0.3510781079530716\n", - "Surface training t=5040, loss=0.29139018058776855\n", - "Surface training t=5041, loss=0.33237215876579285\n", - "Surface training t=5042, loss=0.3298225998878479\n", - "Surface training t=5043, loss=0.3370172530412674\n", - "Surface training t=5044, loss=0.35236912965774536\n", - "Surface training t=5045, loss=0.30571249127388\n", - "Surface training t=5046, loss=0.3181273490190506\n", - "Surface training t=5047, loss=0.3052722364664078\n", - "Surface training t=5048, loss=0.32137201726436615\n", - "Surface training t=5049, loss=0.40604472160339355\n", - "Surface training t=5050, loss=0.30490587651729584\n", - "Surface training t=5051, loss=0.3336108773946762\n", - "Surface training t=5052, loss=0.31789545714855194\n", - "Surface training t=5053, loss=0.37427598237991333\n", - "Surface training t=5054, loss=0.27456338703632355\n", - "Surface training t=5055, loss=0.31868574023246765\n", - "Surface training t=5056, loss=0.33535999059677124\n", - "Surface training t=5057, loss=0.33639541268348694\n", - "Surface training t=5058, loss=0.31113961338996887\n", - "Surface training t=5059, loss=0.3320653587579727\n", - "Surface training t=5060, loss=0.32970669865608215\n", - "Surface training t=5061, loss=0.3436540812253952\n", - "Surface training t=5062, loss=0.35555507242679596\n", - "Surface training t=5063, loss=0.3302745670080185\n", - "Surface training t=5064, loss=0.3070853650569916\n", - "Surface training t=5065, loss=0.31401415169239044\n", - "Surface training t=5066, loss=0.27949759364128113\n", - "Surface training t=5067, loss=0.3778763711452484\n", - "Surface training t=5068, loss=0.3409053534269333\n", - "Surface training t=5069, loss=0.31061360239982605\n", - "Surface training t=5070, loss=0.34573225677013397\n", - "Surface training t=5071, loss=0.3926655948162079\n", - "Surface training t=5072, loss=0.3155723810195923\n", - "Surface training t=5073, loss=0.3356122821569443\n", - "Surface training t=5074, loss=0.32923953235149384\n", - "Surface training t=5075, loss=0.3358412832021713\n", - "Surface training t=5076, loss=0.330840066075325\n", - "Surface training t=5077, loss=0.33498451113700867\n", - "Surface training t=5078, loss=0.34786106646060944\n", - "Surface training t=5079, loss=0.35248924791812897\n", - "Surface training t=5080, loss=0.31918953359127045\n", - "Surface training t=5081, loss=0.34258170425891876\n", - "Surface training t=5082, loss=0.326102614402771\n", - "Surface training t=5083, loss=0.3332967013120651\n", - "Surface training t=5084, loss=0.34429173171520233\n", - "Surface training t=5085, 
loss=0.312836155295372\n", - "Surface training t=5086, loss=0.3133603483438492\n", - "Surface training t=5087, loss=0.28854088485240936\n", - "Surface training t=5088, loss=0.3574047088623047\n", - "Surface training t=5089, loss=0.3291769474744797\n", - "Surface training t=5090, loss=0.32721956074237823\n", - "Surface training t=5091, loss=0.31210191547870636\n", - "Surface training t=5092, loss=0.3378921151161194\n", - "Surface training t=5093, loss=0.35449180006980896\n", - "Surface training t=5094, loss=0.30053049325942993\n", - "Surface training t=5095, loss=0.342450350522995\n", - "Surface training t=5096, loss=0.3298911303281784\n", - "Surface training t=5097, loss=0.30693112313747406\n", - "Surface training t=5098, loss=0.3775996118783951\n", - "Surface training t=5099, loss=0.3450537472963333\n", - "Surface training t=5100, loss=0.36946484446525574\n", - "Surface training t=5101, loss=0.315378338098526\n", - "Surface training t=5102, loss=0.3302224278450012\n", - "Surface training t=5103, loss=0.3657698333263397\n", - "Surface training t=5104, loss=0.3062877953052521\n", - "Surface training t=5105, loss=0.31825053691864014\n", - "Surface training t=5106, loss=0.3210266977548599\n", - "Surface training t=5107, loss=0.374261274933815\n", - "Surface training t=5108, loss=0.29082677513360977\n", - "Surface training t=5109, loss=0.3620336949825287\n", - "Surface training t=5110, loss=0.3069612979888916\n", - "Surface training t=5111, loss=0.33348309993743896\n", - "Surface training t=5112, loss=0.35028955340385437\n", - "Surface training t=5113, loss=0.33586812019348145\n", - "Surface training t=5114, loss=0.3450666218996048\n", - "Surface training t=5115, loss=0.3159158527851105\n", - "Surface training t=5116, loss=0.3435831069946289\n", - "Surface training t=5117, loss=0.3492547571659088\n", - "Surface training t=5118, loss=0.33061981201171875\n", - "Surface training t=5119, loss=0.2839278429746628\n", - "Surface training t=5120, loss=0.32116320729255676\n", - "Surface training t=5121, loss=0.3218190521001816\n", - "Surface training t=5122, loss=0.3376515805721283\n", - "Surface training t=5123, loss=0.3453013598918915\n", - "Surface training t=5124, loss=0.30270346999168396\n", - "Surface training t=5125, loss=0.2980186343193054\n", - "Surface training t=5126, loss=0.3301408588886261\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=5127, loss=0.31235918402671814\n", - "Surface training t=5128, loss=0.2967495620250702\n", - "Surface training t=5129, loss=0.3284297585487366\n", - "Surface training t=5130, loss=0.32267408072948456\n", - "Surface training t=5131, loss=0.3147778809070587\n", - "Surface training t=5132, loss=0.3006099611520767\n", - "Surface training t=5133, loss=0.3322066217660904\n", - "Surface training t=5134, loss=0.3064575344324112\n", - "Surface training t=5135, loss=0.32586970925331116\n", - "Surface training t=5136, loss=0.3398517370223999\n", - "Surface training t=5137, loss=0.3024972528219223\n", - "Surface training t=5138, loss=0.3421929031610489\n", - "Surface training t=5139, loss=0.37816308438777924\n", - "Surface training t=5140, loss=0.33107152581214905\n", - "Surface training t=5141, loss=0.3079499751329422\n", - "Surface training t=5142, loss=0.33051906526088715\n", - "Surface training t=5143, loss=0.31433480978012085\n", - "Surface training t=5144, loss=0.37207408249378204\n", - "Surface training t=5145, loss=0.3656528890132904\n", - "Surface training t=5146, loss=0.28585386276245117\n", - "Surface training 
t=5147, loss=0.3087482899427414\n", - "Surface training t=5148, loss=0.3339806944131851\n", - "Surface training t=5149, loss=0.2772737741470337\n", - "Surface training t=5150, loss=0.3330953121185303\n", - "Surface training t=5151, loss=0.3201081156730652\n", - "Surface training t=5152, loss=0.31957970559597015\n", - "Surface training t=5153, loss=0.3925648629665375\n", - "Surface training t=5154, loss=0.322244793176651\n", - "Surface training t=5155, loss=0.3733319640159607\n", - "Surface training t=5156, loss=0.33869417011737823\n", - "Surface training t=5157, loss=0.35153207182884216\n", - "Surface training t=5158, loss=0.36017192900180817\n", - "Surface training t=5159, loss=0.35140009224414825\n", - "Surface training t=5160, loss=0.3221583664417267\n", - "Surface training t=5161, loss=0.32649077475070953\n", - "Surface training t=5162, loss=0.3200366944074631\n", - "Surface training t=5163, loss=0.3658405840396881\n", - "Surface training t=5164, loss=0.3387824445962906\n", - "Surface training t=5165, loss=0.35921627283096313\n", - "Surface training t=5166, loss=0.3437203913927078\n", - "Surface training t=5167, loss=0.3236856907606125\n", - "Surface training t=5168, loss=0.3191101849079132\n", - "Surface training t=5169, loss=0.35517778992652893\n", - "Surface training t=5170, loss=0.3477075546979904\n", - "Surface training t=5171, loss=0.33887770771980286\n", - "Surface training t=5172, loss=0.3186117112636566\n", - "Surface training t=5173, loss=0.3462630808353424\n", - "Surface training t=5174, loss=0.3194810450077057\n", - "Surface training t=5175, loss=0.2945423573255539\n", - "Surface training t=5176, loss=0.27970387786626816\n", - "Surface training t=5177, loss=0.2812473252415657\n", - "Surface training t=5178, loss=0.3395417481660843\n", - "Surface training t=5179, loss=0.34861965477466583\n", - "Surface training t=5180, loss=0.3401271849870682\n", - "Surface training t=5181, loss=0.32704608142375946\n", - "Surface training t=5182, loss=0.3360053151845932\n", - "Surface training t=5183, loss=0.3340822756290436\n", - "Surface training t=5184, loss=0.29851366579532623\n", - "Surface training t=5185, loss=0.26772399991750717\n", - "Surface training t=5186, loss=0.2876310497522354\n", - "Surface training t=5187, loss=0.31692464649677277\n", - "Surface training t=5188, loss=0.32635755836963654\n", - "Surface training t=5189, loss=0.3457022160291672\n", - "Surface training t=5190, loss=0.3070163279771805\n", - "Surface training t=5191, loss=0.3144734650850296\n", - "Surface training t=5192, loss=0.29606133699417114\n", - "Surface training t=5193, loss=0.35274988412857056\n", - "Surface training t=5194, loss=0.3125286251306534\n", - "Surface training t=5195, loss=0.36362960934638977\n", - "Surface training t=5196, loss=0.362822562456131\n", - "Surface training t=5197, loss=0.28442833572626114\n", - "Surface training t=5198, loss=0.30875878036022186\n", - "Surface training t=5199, loss=0.26590678840875626\n", - "Surface training t=5200, loss=0.2839842289686203\n", - "Surface training t=5201, loss=0.2611456662416458\n", - "Surface training t=5202, loss=0.31302520632743835\n", - "Surface training t=5203, loss=0.31763163208961487\n", - "Surface training t=5204, loss=0.3118499517440796\n", - "Surface training t=5205, loss=0.28228698670864105\n", - "Surface training t=5206, loss=0.3182913213968277\n", - "Surface training t=5207, loss=0.30041101574897766\n", - "Surface training t=5208, loss=0.3291539251804352\n", - "Surface training t=5209, loss=0.2746112644672394\n", - "Surface training 
t=5210, loss=0.3271748721599579\n", - "Surface training t=5211, loss=0.31908018887043\n", - "Surface training t=5212, loss=0.29389628767967224\n", - "Surface training t=5213, loss=0.29142312705516815\n", - "Surface training t=5214, loss=0.2833636701107025\n", - "Surface training t=5215, loss=0.3139423131942749\n", - "Surface training t=5216, loss=0.27024422585964203\n", - "Surface training t=5217, loss=0.3649779409170151\n", - "Surface training t=5218, loss=0.28366396576166153\n", - "Surface training t=5219, loss=0.3533956706523895\n", - "Surface training t=5220, loss=0.289802722632885\n", - "Surface training t=5221, loss=0.286709263920784\n", - "Surface training t=5222, loss=0.28641752898693085\n", - "Surface training t=5223, loss=0.3136758804321289\n", - "Surface training t=5224, loss=0.36799444258213043\n", - "Surface training t=5225, loss=0.30525609850883484\n", - "Surface training t=5226, loss=0.30976687371730804\n", - "Surface training t=5227, loss=0.2917463481426239\n", - "Surface training t=5228, loss=0.24422594159841537\n", - "Surface training t=5229, loss=0.32951758801937103\n", - "Surface training t=5230, loss=0.2839035242795944\n", - "Surface training t=5231, loss=0.31081482768058777\n", - "Surface training t=5232, loss=0.3194231688976288\n", - "Surface training t=5233, loss=0.29555778205394745\n", - "Surface training t=5234, loss=0.3080359846353531\n", - "Surface training t=5235, loss=0.34542316198349\n", - "Surface training t=5236, loss=0.30305199325084686\n", - "Surface training t=5237, loss=0.2829519659280777\n", - "Surface training t=5238, loss=0.31136369705200195\n", - "Surface training t=5239, loss=0.2844126522541046\n", - "Surface training t=5240, loss=0.2855081111192703\n", - "Surface training t=5241, loss=0.31801503896713257\n", - "Surface training t=5242, loss=0.3354305177927017\n", - "Surface training t=5243, loss=0.3014400452375412\n", - "Surface training t=5244, loss=0.28869061172008514\n", - "Surface training t=5245, loss=0.2764149233698845\n", - "Surface training t=5246, loss=0.28735628724098206\n", - "Surface training t=5247, loss=0.3087511658668518\n", - "Surface training t=5248, loss=0.32195207476615906\n", - "Surface training t=5249, loss=0.33974602818489075\n", - "Surface training t=5250, loss=0.33505403995513916\n", - "Surface training t=5251, loss=0.3104529082775116\n", - "Surface training t=5252, loss=0.2795489579439163\n", - "Surface training t=5253, loss=0.2762364521622658\n", - "Surface training t=5254, loss=0.28088851273059845\n", - "Surface training t=5255, loss=0.3075270652770996\n", - "Surface training t=5256, loss=0.2796332612633705\n", - "Surface training t=5257, loss=0.2957819700241089\n", - "Surface training t=5258, loss=0.2990708202123642\n", - "Surface training t=5259, loss=0.32436491549015045\n", - "Surface training t=5260, loss=0.3032735288143158\n", - "Surface training t=5261, loss=0.29087167978286743\n", - "Surface training t=5262, loss=0.2982979863882065\n", - "Surface training t=5263, loss=0.33239229023456573\n", - "Surface training t=5264, loss=0.2668542116880417\n", - "Surface training t=5265, loss=0.295545756816864\n", - "Surface training t=5266, loss=0.3399112671613693\n", - "Surface training t=5267, loss=0.3315669745206833\n", - "Surface training t=5268, loss=0.32599781453609467\n", - "Surface training t=5269, loss=0.35181064903736115\n", - "Surface training t=5270, loss=0.28338204324245453\n", - "Surface training t=5271, loss=0.2855370491743088\n", - "Surface training t=5272, loss=0.33162136375904083\n", - "Surface training 
t=5273, loss=0.34072771668434143\n", - "Surface training t=5274, loss=0.30892015993595123\n", - "Surface training t=5275, loss=0.2932841181755066\n", - "Surface training t=5276, loss=0.30333344638347626\n", - "Surface training t=5277, loss=0.3431232273578644\n", - "Surface training t=5278, loss=0.3499206453561783\n", - "Surface training t=5279, loss=0.27706025540828705\n", - "Surface training t=5280, loss=0.30452045798301697\n", - "Surface training t=5281, loss=0.3717424124479294\n", - "Surface training t=5282, loss=0.3294285088777542\n", - "Surface training t=5283, loss=0.2780197113752365\n", - "Surface training t=5284, loss=0.28540097177028656\n", - "Surface training t=5285, loss=0.3080756813287735\n", - "Surface training t=5286, loss=0.29741905629634857\n", - "Surface training t=5287, loss=0.2766420915722847\n", - "Surface training t=5288, loss=0.30745406448841095\n", - "Surface training t=5289, loss=0.3178475648164749\n", - "Surface training t=5290, loss=0.2757110595703125\n", - "Surface training t=5291, loss=0.2892886996269226\n", - "Surface training t=5292, loss=0.27124103158712387\n", - "Surface training t=5293, loss=0.3156990706920624\n", - "Surface training t=5294, loss=0.30737820267677307\n", - "Surface training t=5295, loss=0.2827487140893936\n", - "Surface training t=5296, loss=0.35758647322654724\n", - "Surface training t=5297, loss=0.3050578236579895\n", - "Surface training t=5298, loss=0.31275439262390137\n", - "Surface training t=5299, loss=0.294928178191185\n", - "Surface training t=5300, loss=0.2899646759033203\n", - "Surface training t=5301, loss=0.28297969698905945\n", - "Surface training t=5302, loss=0.24603481590747833\n", - "Surface training t=5303, loss=0.2800793796777725\n", - "Surface training t=5304, loss=0.2912147790193558\n", - "Surface training t=5305, loss=0.30129486322402954\n", - "Surface training t=5306, loss=0.32453176379203796\n", - "Surface training t=5307, loss=0.2821941524744034\n", - "Surface training t=5308, loss=0.2616967260837555\n", - "Surface training t=5309, loss=0.33688217401504517\n", - "Surface training t=5310, loss=0.28318412601947784\n", - "Surface training t=5311, loss=0.27297960221767426\n", - "Surface training t=5312, loss=0.310537189245224\n", - "Surface training t=5313, loss=0.28830520808696747\n", - "Surface training t=5314, loss=0.2757919430732727\n", - "Surface training t=5315, loss=0.3154882937669754\n", - "Surface training t=5316, loss=0.29518888890743256\n", - "Surface training t=5317, loss=0.29190076887607574\n", - "Surface training t=5318, loss=0.3252597749233246\n", - "Surface training t=5319, loss=0.2823193520307541\n", - "Surface training t=5320, loss=0.2899266928434372\n", - "Surface training t=5321, loss=0.3152923136949539\n", - "Surface training t=5322, loss=0.3083191365003586\n", - "Surface training t=5323, loss=0.31524188816547394\n", - "Surface training t=5324, loss=0.31481538712978363\n", - "Surface training t=5325, loss=0.34395258128643036\n", - "Surface training t=5326, loss=0.3631097227334976\n", - "Surface training t=5327, loss=0.32726915180683136\n", - "Surface training t=5328, loss=0.3260206878185272\n", - "Surface training t=5329, loss=0.31497257947921753\n", - "Surface training t=5330, loss=0.3168669790029526\n", - "Surface training t=5331, loss=0.30098024010658264\n", - "Surface training t=5332, loss=0.3021669089794159\n", - "Surface training t=5333, loss=0.30488647520542145\n", - "Surface training t=5334, loss=0.3000537008047104\n", - "Surface training t=5335, loss=0.2714517265558243\n" - ] - }, - { - 
"name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=5336, loss=0.3154228627681732\n", - "Surface training t=5337, loss=0.3168509304523468\n", - "Surface training t=5338, loss=0.28224068880081177\n", - "Surface training t=5339, loss=0.2504410520195961\n", - "Surface training t=5340, loss=0.32608552277088165\n", - "Surface training t=5341, loss=0.30940762162208557\n", - "Surface training t=5342, loss=0.27408795058727264\n", - "Surface training t=5343, loss=0.3019694983959198\n", - "Surface training t=5344, loss=0.2762930616736412\n", - "Surface training t=5345, loss=0.31256940960884094\n", - "Surface training t=5346, loss=0.3233474940061569\n", - "Surface training t=5347, loss=0.2736572101712227\n", - "Surface training t=5348, loss=0.3227270543575287\n", - "Surface training t=5349, loss=0.3103865683078766\n", - "Surface training t=5350, loss=0.3080551028251648\n", - "Surface training t=5351, loss=0.26782675832509995\n", - "Surface training t=5352, loss=0.30565614998340607\n", - "Surface training t=5353, loss=0.26875555515289307\n", - "Surface training t=5354, loss=0.31428076326847076\n", - "Surface training t=5355, loss=0.27720676362514496\n", - "Surface training t=5356, loss=0.3071555644273758\n", - "Surface training t=5357, loss=0.2775801569223404\n", - "Surface training t=5358, loss=0.2764958292245865\n", - "Surface training t=5359, loss=0.30752623081207275\n", - "Surface training t=5360, loss=0.2797190397977829\n", - "Surface training t=5361, loss=0.3154224157333374\n", - "Surface training t=5362, loss=0.3495301455259323\n", - "Surface training t=5363, loss=0.31150591373443604\n", - "Surface training t=5364, loss=0.3151181489229202\n", - "Surface training t=5365, loss=0.29149240255355835\n", - "Surface training t=5366, loss=0.2884548604488373\n", - "Surface training t=5367, loss=0.30504658818244934\n", - "Surface training t=5368, loss=0.3349509686231613\n", - "Surface training t=5369, loss=0.27229519188404083\n", - "Surface training t=5370, loss=0.30262263119220734\n", - "Surface training t=5371, loss=0.33740057051181793\n", - "Surface training t=5372, loss=0.311432808637619\n", - "Surface training t=5373, loss=0.32609376311302185\n", - "Surface training t=5374, loss=0.3384900391101837\n", - "Surface training t=5375, loss=0.32255004346370697\n", - "Surface training t=5376, loss=0.25786731392145157\n", - "Surface training t=5377, loss=0.31840313971042633\n", - "Surface training t=5378, loss=0.2858589142560959\n", - "Surface training t=5379, loss=0.3159802407026291\n", - "Surface training t=5380, loss=0.2520401254296303\n", - "Surface training t=5381, loss=0.2748873308300972\n", - "Surface training t=5382, loss=0.2965017408132553\n", - "Surface training t=5383, loss=0.3010137528181076\n", - "Surface training t=5384, loss=0.3055528849363327\n", - "Surface training t=5385, loss=0.28253479301929474\n", - "Surface training t=5386, loss=0.32275134325027466\n", - "Surface training t=5387, loss=0.2955995798110962\n", - "Surface training t=5388, loss=0.3002787083387375\n", - "Surface training t=5389, loss=0.2856736183166504\n", - "Surface training t=5390, loss=0.25656118243932724\n", - "Surface training t=5391, loss=0.2957989573478699\n", - "Surface training t=5392, loss=0.26488954573869705\n", - "Surface training t=5393, loss=0.27138949930667877\n", - "Surface training t=5394, loss=0.24850346148014069\n", - "Surface training t=5395, loss=0.2515455558896065\n", - "Surface training t=5396, loss=0.25767311453819275\n", - "Surface training t=5397, 
loss=0.2536236643791199\n", - "Surface training t=5398, loss=0.28068122267723083\n", - "Surface training t=5399, loss=0.3079421669244766\n", - "Surface training t=5400, loss=0.3072337508201599\n", - "Surface training t=5401, loss=0.31400585174560547\n", - "Surface training t=5402, loss=0.3057602792978287\n", - "Surface training t=5403, loss=0.27873337268829346\n", - "Surface training t=5404, loss=0.26237376034259796\n", - "Surface training t=5405, loss=0.33534419536590576\n", - "Surface training t=5406, loss=0.28370876610279083\n", - "Surface training t=5407, loss=0.2611387372016907\n", - "Surface training t=5408, loss=0.26614295691251755\n", - "Surface training t=5409, loss=0.32271914184093475\n", - "Surface training t=5410, loss=0.2885916531085968\n", - "Surface training t=5411, loss=0.3603465110063553\n", - "Surface training t=5412, loss=0.3324859291315079\n", - "Surface training t=5413, loss=0.3189411461353302\n", - "Surface training t=5414, loss=0.309375524520874\n", - "Surface training t=5415, loss=0.26862654834985733\n", - "Surface training t=5416, loss=0.29588085412979126\n", - "Surface training t=5417, loss=0.29247719049453735\n", - "Surface training t=5418, loss=0.2808109223842621\n", - "Surface training t=5419, loss=0.3205348700284958\n", - "Surface training t=5420, loss=0.25902434438467026\n", - "Surface training t=5421, loss=0.3177907466888428\n", - "Surface training t=5422, loss=0.2786378115415573\n", - "Surface training t=5423, loss=0.3107255846261978\n", - "Surface training t=5424, loss=0.2891990542411804\n", - "Surface training t=5425, loss=0.287076935172081\n", - "Surface training t=5426, loss=0.30963917076587677\n", - "Surface training t=5427, loss=0.27823111414909363\n", - "Surface training t=5428, loss=0.2507066875696182\n", - "Surface training t=5429, loss=0.2834023982286453\n", - "Surface training t=5430, loss=0.2939326763153076\n", - "Surface training t=5431, loss=0.27959662675857544\n", - "Surface training t=5432, loss=0.2889847755432129\n", - "Surface training t=5433, loss=0.24871979653835297\n", - "Surface training t=5434, loss=0.27621908485889435\n", - "Surface training t=5435, loss=0.24632088094949722\n", - "Surface training t=5436, loss=0.26472024619579315\n", - "Surface training t=5437, loss=0.2506232485175133\n", - "Surface training t=5438, loss=0.3039991110563278\n", - "Surface training t=5439, loss=0.33138407766819\n", - "Surface training t=5440, loss=0.3109571635723114\n", - "Surface training t=5441, loss=0.25504396110773087\n", - "Surface training t=5442, loss=0.299990713596344\n", - "Surface training t=5443, loss=0.3032565414905548\n", - "Surface training t=5444, loss=0.2612878829240799\n", - "Surface training t=5445, loss=0.26880841702222824\n", - "Surface training t=5446, loss=0.26142919808626175\n", - "Surface training t=5447, loss=0.27344919741153717\n", - "Surface training t=5448, loss=0.27583011984825134\n", - "Surface training t=5449, loss=0.30835461616516113\n", - "Surface training t=5450, loss=0.2793858051300049\n", - "Surface training t=5451, loss=0.2662171274423599\n", - "Surface training t=5452, loss=0.2994857579469681\n", - "Surface training t=5453, loss=0.2735386937856674\n", - "Surface training t=5454, loss=0.2794640064239502\n", - "Surface training t=5455, loss=0.3171505928039551\n", - "Surface training t=5456, loss=0.3002983033657074\n", - "Surface training t=5457, loss=0.3573961853981018\n", - "Surface training t=5458, loss=0.3292344659566879\n", - "Surface training t=5459, loss=0.2958628684282303\n", - "Surface training t=5460, 
loss=0.31247176229953766\n", - "Surface training t=5461, loss=0.27524353563785553\n", - "Surface training t=5462, loss=0.2796356752514839\n", - "Surface training t=5463, loss=0.2971673756837845\n", - "Surface training t=5464, loss=0.3098434954881668\n", - "Surface training t=5465, loss=0.32059985399246216\n", - "Surface training t=5466, loss=0.2761843055486679\n", - "Surface training t=5467, loss=0.30048567056655884\n", - "Surface training t=5468, loss=0.2842482030391693\n", - "Surface training t=5469, loss=0.3106119930744171\n", - "Surface training t=5470, loss=0.28260834515094757\n", - "Surface training t=5471, loss=0.26915451139211655\n", - "Surface training t=5472, loss=0.29749737679958344\n", - "Surface training t=5473, loss=0.2910134345293045\n", - "Surface training t=5474, loss=0.305224746465683\n", - "Surface training t=5475, loss=0.3019418865442276\n", - "Surface training t=5476, loss=0.28075218200683594\n", - "Surface training t=5477, loss=0.30019836127758026\n", - "Surface training t=5478, loss=0.3820078372955322\n", - "Surface training t=5479, loss=0.2732817307114601\n", - "Surface training t=5480, loss=0.3466426134109497\n", - "Surface training t=5481, loss=0.3124508112668991\n", - "Surface training t=5482, loss=0.2845621258020401\n", - "Surface training t=5483, loss=0.30707378685474396\n", - "Surface training t=5484, loss=0.29039452970027924\n", - "Surface training t=5485, loss=0.3199286609888077\n", - "Surface training t=5486, loss=0.3197631239891052\n", - "Surface training t=5487, loss=0.27670401334762573\n", - "Surface training t=5488, loss=0.26168742775917053\n", - "Surface training t=5489, loss=0.2661653459072113\n", - "Surface training t=5490, loss=0.2356085404753685\n", - "Surface training t=5491, loss=0.26918022334575653\n", - "Surface training t=5492, loss=0.24656807631254196\n", - "Surface training t=5493, loss=0.32218673825263977\n", - "Surface training t=5494, loss=0.28741365671157837\n", - "Surface training t=5495, loss=0.24969105422496796\n", - "Surface training t=5496, loss=0.31087730824947357\n", - "Surface training t=5497, loss=0.2790236920118332\n", - "Surface training t=5498, loss=0.2759190648794174\n", - "Surface training t=5499, loss=0.28948312997817993\n", - "Surface training t=5500, loss=0.2858654707670212\n", - "Surface training t=5501, loss=0.271467462182045\n", - "Surface training t=5502, loss=0.2934168726205826\n", - "Surface training t=5503, loss=0.2782324254512787\n", - "Surface training t=5504, loss=0.27977845072746277\n", - "Surface training t=5505, loss=0.29194967448711395\n", - "Surface training t=5506, loss=0.2778228223323822\n", - "Surface training t=5507, loss=0.263792484998703\n", - "Surface training t=5508, loss=0.3202380985021591\n", - "Surface training t=5509, loss=0.2765244245529175\n", - "Surface training t=5510, loss=0.3174542635679245\n", - "Surface training t=5511, loss=0.2867610603570938\n", - "Surface training t=5512, loss=0.27944140136241913\n", - "Surface training t=5513, loss=0.2963864207267761\n", - "Surface training t=5514, loss=0.2732263058423996\n", - "Surface training t=5515, loss=0.25475041568279266\n", - "Surface training t=5516, loss=0.2956255078315735\n", - "Surface training t=5517, loss=0.27935677766799927\n", - "Surface training t=5518, loss=0.2768433094024658\n", - "Surface training t=5519, loss=0.3137855678796768\n", - "Surface training t=5520, loss=0.28458085656166077\n", - "Surface training t=5521, loss=0.32209503650665283\n", - "Surface training t=5522, loss=0.3039305657148361\n", - "Surface training t=5523, 
loss=0.32137535512447357\n", - "Surface training t=5524, loss=0.29991112649440765\n", - "Surface training t=5525, loss=0.310974583029747\n", - "Surface training t=5526, loss=0.2821638733148575\n", - "Surface training t=5527, loss=0.24271269142627716\n", - "Surface training t=5528, loss=0.2859947234392166\n", - "Surface training t=5529, loss=0.26540569216012955\n", - "Surface training t=5530, loss=0.2941063791513443\n", - "Surface training t=5531, loss=0.27333129942417145\n", - "Surface training t=5532, loss=0.26267752051353455\n", - "Surface training t=5533, loss=0.2423197254538536\n", - "Surface training t=5534, loss=0.2731754183769226\n", - "Surface training t=5535, loss=0.2915085256099701\n", - "Surface training t=5536, loss=0.2604707181453705\n", - "Surface training t=5537, loss=0.26023754477500916\n", - "Surface training t=5538, loss=0.29541662335395813\n", - "Surface training t=5539, loss=0.2839031517505646\n", - "Surface training t=5540, loss=0.27245618402957916\n", - "Surface training t=5541, loss=0.24934887886047363\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=5542, loss=0.27138882875442505\n", - "Surface training t=5543, loss=0.2844187170267105\n", - "Surface training t=5544, loss=0.31356102228164673\n", - "Surface training t=5545, loss=0.292723685503006\n", - "Surface training t=5546, loss=0.25311287492513657\n", - "Surface training t=5547, loss=0.27689386904239655\n", - "Surface training t=5548, loss=0.232448011636734\n", - "Surface training t=5549, loss=0.2797639071941376\n", - "Surface training t=5550, loss=0.28144100308418274\n", - "Surface training t=5551, loss=0.2877567410469055\n", - "Surface training t=5552, loss=0.2668849527835846\n", - "Surface training t=5553, loss=0.2677518427371979\n", - "Surface training t=5554, loss=0.2960076481103897\n", - "Surface training t=5555, loss=0.26236240565776825\n", - "Surface training t=5556, loss=0.294985368847847\n", - "Surface training t=5557, loss=0.2961774542927742\n", - "Surface training t=5558, loss=0.24526813626289368\n", - "Surface training t=5559, loss=0.25140950083732605\n", - "Surface training t=5560, loss=0.2658272981643677\n", - "Surface training t=5561, loss=0.2498021125793457\n", - "Surface training t=5562, loss=0.30401240289211273\n", - "Surface training t=5563, loss=0.2719496488571167\n", - "Surface training t=5564, loss=0.25773918628692627\n", - "Surface training t=5565, loss=0.2737438827753067\n", - "Surface training t=5566, loss=0.30231404304504395\n", - "Surface training t=5567, loss=0.2951057404279709\n", - "Surface training t=5568, loss=0.27608321607112885\n", - "Surface training t=5569, loss=0.2726520448923111\n", - "Surface training t=5570, loss=0.2513929158449173\n", - "Surface training t=5571, loss=0.28595873713493347\n", - "Surface training t=5572, loss=0.28757618367671967\n", - "Surface training t=5573, loss=0.24325910210609436\n", - "Surface training t=5574, loss=0.26000887155532837\n", - "Surface training t=5575, loss=0.2440582439303398\n", - "Surface training t=5576, loss=0.2601131945848465\n", - "Surface training t=5577, loss=0.26218388974666595\n", - "Surface training t=5578, loss=0.28694024682044983\n", - "Surface training t=5579, loss=0.29368405044078827\n", - "Surface training t=5580, loss=0.2851390391588211\n", - "Surface training t=5581, loss=0.26886405050754547\n", - "Surface training t=5582, loss=0.27911077439785004\n", - "Surface training t=5583, loss=0.2663371115922928\n", - "Surface training t=5584, loss=0.25594988465309143\n", - "Surface 
training t=5585, loss=0.21757669746875763\n", - "Surface training t=5586, loss=0.2722439467906952\n", - "Surface training t=5587, loss=0.22878342866897583\n", - "Surface training t=5588, loss=0.2590966448187828\n", - "Surface training t=5589, loss=0.2731727659702301\n", - "Surface training t=5590, loss=0.28275375068187714\n", - "Surface training t=5591, loss=0.27871301770210266\n", - "Surface training t=5592, loss=0.2857295870780945\n", - "Surface training t=5593, loss=0.3189517557621002\n", - "Surface training t=5594, loss=0.28158918023109436\n", - "Surface training t=5595, loss=0.29484932124614716\n", - "Surface training t=5596, loss=0.30400918424129486\n", - "Surface training t=5597, loss=0.2993856221437454\n", - "Surface training t=5598, loss=0.27200205624103546\n", - "Surface training t=5599, loss=0.30623704195022583\n", - "Surface training t=5600, loss=0.26845061779022217\n", - "Surface training t=5601, loss=0.25718916207551956\n", - "Surface training t=5602, loss=0.28326085209846497\n", - "Surface training t=5603, loss=0.30003419518470764\n", - "Surface training t=5604, loss=0.23991231620311737\n", - "Surface training t=5605, loss=0.27385224401950836\n", - "Surface training t=5606, loss=0.2870698869228363\n", - "Surface training t=5607, loss=0.3018124848604202\n", - "Surface training t=5608, loss=0.27997085452079773\n", - "Surface training t=5609, loss=0.3286374658346176\n", - "Surface training t=5610, loss=0.3184300810098648\n", - "Surface training t=5611, loss=0.2683255970478058\n", - "Surface training t=5612, loss=0.271855428814888\n", - "Surface training t=5613, loss=0.2505941241979599\n", - "Surface training t=5614, loss=0.24834634363651276\n", - "Surface training t=5615, loss=0.2617364674806595\n", - "Surface training t=5616, loss=0.2427818924188614\n", - "Surface training t=5617, loss=0.25957509875297546\n", - "Surface training t=5618, loss=0.26846276223659515\n", - "Surface training t=5619, loss=0.24651837348937988\n", - "Surface training t=5620, loss=0.2638716995716095\n", - "Surface training t=5621, loss=0.25387635827064514\n", - "Surface training t=5622, loss=0.24597052484750748\n", - "Surface training t=5623, loss=0.25522398948669434\n", - "Surface training t=5624, loss=0.26750846952199936\n", - "Surface training t=5625, loss=0.25618021935224533\n", - "Surface training t=5626, loss=0.2969071567058563\n", - "Surface training t=5627, loss=0.27289776504039764\n", - "Surface training t=5628, loss=0.29026858508586884\n", - "Surface training t=5629, loss=0.2521316111087799\n", - "Surface training t=5630, loss=0.2752348482608795\n", - "Surface training t=5631, loss=0.31527090072631836\n", - "Surface training t=5632, loss=0.2528953030705452\n", - "Surface training t=5633, loss=0.2850010097026825\n", - "Surface training t=5634, loss=0.27734095603227615\n", - "Surface training t=5635, loss=0.2594125419855118\n", - "Surface training t=5636, loss=0.23811812698841095\n", - "Surface training t=5637, loss=0.24894623458385468\n", - "Surface training t=5638, loss=0.2695363312959671\n", - "Surface training t=5639, loss=0.28653375804424286\n", - "Surface training t=5640, loss=0.24064621329307556\n", - "Surface training t=5641, loss=0.24331875145435333\n", - "Surface training t=5642, loss=0.2547645941376686\n", - "Surface training t=5643, loss=0.2678392305970192\n", - "Surface training t=5644, loss=0.2293531596660614\n", - "Surface training t=5645, loss=0.2438811957836151\n", - "Surface training t=5646, loss=0.2520901635289192\n", - "Surface training t=5647, loss=0.2493877336382866\n", - 
"Surface training t=5648, loss=0.24569933861494064\n", - "Surface training t=5649, loss=0.2593766003847122\n", - "Surface training t=5650, loss=0.2594577670097351\n", - "Surface training t=5651, loss=0.26512354612350464\n", - "Surface training t=5652, loss=0.3112800195813179\n", - "Surface training t=5653, loss=0.23411448299884796\n", - "Surface training t=5654, loss=0.23659634590148926\n", - "Surface training t=5655, loss=0.24505725502967834\n", - "Surface training t=5656, loss=0.22829022258520126\n", - "Surface training t=5657, loss=0.2425260692834854\n", - "Surface training t=5658, loss=0.20204700529575348\n", - "Surface training t=5659, loss=0.24042008817195892\n", - "Surface training t=5660, loss=0.26664645969867706\n", - "Surface training t=5661, loss=0.24775190651416779\n", - "Surface training t=5662, loss=0.24065418541431427\n", - "Surface training t=5663, loss=0.24357635527849197\n", - "Surface training t=5664, loss=0.21948809921741486\n", - "Surface training t=5665, loss=0.24532347917556763\n", - "Surface training t=5666, loss=0.21521545946598053\n", - "Surface training t=5667, loss=0.22890952229499817\n", - "Surface training t=5668, loss=0.255230113863945\n", - "Surface training t=5669, loss=0.23950286209583282\n", - "Surface training t=5670, loss=0.24355542659759521\n", - "Surface training t=5671, loss=0.24598385393619537\n", - "Surface training t=5672, loss=0.26544634997844696\n", - "Surface training t=5673, loss=0.2614247798919678\n", - "Surface training t=5674, loss=0.2677955776453018\n", - "Surface training t=5675, loss=0.26819784939289093\n", - "Surface training t=5676, loss=0.31399428844451904\n", - "Surface training t=5677, loss=0.23841558396816254\n", - "Surface training t=5678, loss=0.3031114637851715\n", - "Surface training t=5679, loss=0.2751857340335846\n", - "Surface training t=5680, loss=0.2820971757173538\n", - "Surface training t=5681, loss=0.3061434030532837\n", - "Surface training t=5682, loss=0.2608427107334137\n", - "Surface training t=5683, loss=0.26679010689258575\n", - "Surface training t=5684, loss=0.2638688310980797\n", - "Surface training t=5685, loss=0.3037513941526413\n", - "Surface training t=5686, loss=0.24148042500019073\n", - "Surface training t=5687, loss=0.27669382095336914\n", - "Surface training t=5688, loss=0.2881956100463867\n", - "Surface training t=5689, loss=0.26133888959884644\n", - "Surface training t=5690, loss=0.24681229889392853\n", - "Surface training t=5691, loss=0.27391237020492554\n", - "Surface training t=5692, loss=0.26092658936977386\n", - "Surface training t=5693, loss=0.31026582419872284\n", - "Surface training t=5694, loss=0.2742907702922821\n", - "Surface training t=5695, loss=0.29612063616514206\n", - "Surface training t=5696, loss=0.27647264301776886\n", - "Surface training t=5697, loss=0.2366505116224289\n", - "Surface training t=5698, loss=0.2805522680282593\n", - "Surface training t=5699, loss=0.2583799660205841\n", - "Surface training t=5700, loss=0.28827178478240967\n", - "Surface training t=5701, loss=0.2948344051837921\n", - "Surface training t=5702, loss=0.23715005069971085\n", - "Surface training t=5703, loss=0.24524802714586258\n", - "Surface training t=5704, loss=0.24866118282079697\n", - "Surface training t=5705, loss=0.23410479724407196\n", - "Surface training t=5706, loss=0.2103646621108055\n", - "Surface training t=5707, loss=0.23618943989276886\n", - "Surface training t=5708, loss=0.24107129126787186\n", - "Surface training t=5709, loss=0.2343919649720192\n", - "Surface training t=5710, 
loss=0.2182186245918274\n", - "Surface training t=5711, loss=0.27048371732234955\n", - "Surface training t=5712, loss=0.23509840667247772\n", - "Surface training t=5713, loss=0.2676534056663513\n", - "Surface training t=5714, loss=0.21997057646512985\n", - "Surface training t=5715, loss=0.22156639397144318\n", - "Surface training t=5716, loss=0.2582215219736099\n", - "Surface training t=5717, loss=0.2714954912662506\n", - "Surface training t=5718, loss=0.24157509207725525\n", - "Surface training t=5719, loss=0.2728704437613487\n", - "Surface training t=5720, loss=0.2719624489545822\n", - "Surface training t=5721, loss=0.28234247863292694\n", - "Surface training t=5722, loss=0.24687814712524414\n", - "Surface training t=5723, loss=0.23862841725349426\n", - "Surface training t=5724, loss=0.2245400846004486\n", - "Surface training t=5725, loss=0.25974874198436737\n", - "Surface training t=5726, loss=0.22384586930274963\n", - "Surface training t=5727, loss=0.23736846446990967\n", - "Surface training t=5728, loss=0.25716813653707504\n", - "Surface training t=5729, loss=0.26260292530059814\n", - "Surface training t=5730, loss=0.24021179229021072\n", - "Surface training t=5731, loss=0.26733438670635223\n", - "Surface training t=5732, loss=0.2501276507973671\n", - "Surface training t=5733, loss=0.2734862193465233\n", - "Surface training t=5734, loss=0.26853734254837036\n", - "Surface training t=5735, loss=0.26243139803409576\n", - "Surface training t=5736, loss=0.2560267448425293\n", - "Surface training t=5737, loss=0.23318372666835785\n", - "Surface training t=5738, loss=0.21940502524375916\n", - "Surface training t=5739, loss=0.22199906408786774\n", - "Surface training t=5740, loss=0.21395358443260193\n", - "Surface training t=5741, loss=0.21865315735340118\n", - "Surface training t=5742, loss=0.2663719803094864\n", - "Surface training t=5743, loss=0.20194685459136963\n", - "Surface training t=5744, loss=0.2627224028110504\n", - "Surface training t=5745, loss=0.2358788549900055\n", - "Surface training t=5746, loss=0.21225757896900177\n", - "Surface training t=5747, loss=0.2502650320529938\n", - "Surface training t=5748, loss=0.26554616540670395\n", - "Surface training t=5749, loss=0.251275010406971\n", - "Surface training t=5750, loss=0.2339509353041649\n", - "Surface training t=5751, loss=0.2591789662837982\n", - "Surface training t=5752, loss=0.2300756499171257\n", - "Surface training t=5753, loss=0.2483048439025879\n", - "Surface training t=5754, loss=0.2508906349539757\n", - "Surface training t=5755, loss=0.24239613115787506\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=5756, loss=0.28181685507297516\n", - "Surface training t=5757, loss=0.26344049721956253\n", - "Surface training t=5758, loss=0.29710108041763306\n", - "Surface training t=5759, loss=0.26785653829574585\n", - "Surface training t=5760, loss=0.2803667336702347\n", - "Surface training t=5761, loss=0.2631390243768692\n", - "Surface training t=5762, loss=0.24363144487142563\n", - "Surface training t=5763, loss=0.2489042654633522\n", - "Surface training t=5764, loss=0.23085515946149826\n", - "Surface training t=5765, loss=0.25608067214488983\n", - "Surface training t=5766, loss=0.22413869947195053\n", - "Surface training t=5767, loss=0.25395335257053375\n", - "Surface training t=5768, loss=0.22599677741527557\n", - "Surface training t=5769, loss=0.24030011147260666\n", - "Surface training t=5770, loss=0.24892005324363708\n", - "Surface training t=5771, loss=0.23929309844970703\n", - 
"Surface training t=5772, loss=0.2633812129497528\n", - "Surface training t=5773, loss=0.24674811959266663\n", - "Surface training t=5774, loss=0.26250315457582474\n", - "Surface training t=5775, loss=0.29356078058481216\n", - "Surface training t=5776, loss=0.213335283100605\n", - "Surface training t=5777, loss=0.19810620695352554\n", - "Surface training t=5778, loss=0.24865950644016266\n", - "Surface training t=5779, loss=0.2092345878481865\n", - "Surface training t=5780, loss=0.22552520781755447\n", - "Surface training t=5781, loss=0.2413126528263092\n", - "Surface training t=5782, loss=0.1924886628985405\n", - "Surface training t=5783, loss=0.247968889772892\n", - "Surface training t=5784, loss=0.22858811169862747\n", - "Surface training t=5785, loss=0.21570511162281036\n", - "Surface training t=5786, loss=0.22778580337762833\n", - "Surface training t=5787, loss=0.2542550787329674\n", - "Surface training t=5788, loss=0.2166137844324112\n", - "Surface training t=5789, loss=0.2316930741071701\n", - "Surface training t=5790, loss=0.23551886528730392\n", - "Surface training t=5791, loss=0.25839152932167053\n", - "Surface training t=5792, loss=0.24083992093801498\n", - "Surface training t=5793, loss=0.28645437955856323\n", - "Surface training t=5794, loss=0.26577866822481155\n", - "Surface training t=5795, loss=0.223770871758461\n", - "Surface training t=5796, loss=0.25682928413152695\n", - "Surface training t=5797, loss=0.2278820425271988\n", - "Surface training t=5798, loss=0.25040098279714584\n", - "Surface training t=5799, loss=0.23915944248437881\n", - "Surface training t=5800, loss=0.25439295917749405\n", - "Surface training t=5801, loss=0.2066636011004448\n", - "Surface training t=5802, loss=0.24651185423135757\n", - "Surface training t=5803, loss=0.2418351024389267\n", - "Surface training t=5804, loss=0.21879281848669052\n", - "Surface training t=5805, loss=0.2353498935699463\n", - "Surface training t=5806, loss=0.2052706554532051\n", - "Surface training t=5807, loss=0.21674738079309464\n", - "Surface training t=5808, loss=0.19287899881601334\n", - "Surface training t=5809, loss=0.2848002016544342\n", - "Surface training t=5810, loss=0.22335894405841827\n", - "Surface training t=5811, loss=0.2701161727309227\n", - "Surface training t=5812, loss=0.300137922167778\n", - "Surface training t=5813, loss=0.21339377015829086\n", - "Surface training t=5814, loss=0.236129030585289\n", - "Surface training t=5815, loss=0.22367924451828003\n", - "Surface training t=5816, loss=0.23495107889175415\n", - "Surface training t=5817, loss=0.23953159153461456\n", - "Surface training t=5818, loss=0.2501211315393448\n", - "Surface training t=5819, loss=0.21391016989946365\n", - "Surface training t=5820, loss=0.21211907267570496\n", - "Surface training t=5821, loss=0.24048027396202087\n", - "Surface training t=5822, loss=0.2155677229166031\n", - "Surface training t=5823, loss=0.2605423629283905\n", - "Surface training t=5824, loss=0.22448408603668213\n", - "Surface training t=5825, loss=0.21246473491191864\n", - "Surface training t=5826, loss=0.21469055116176605\n", - "Surface training t=5827, loss=0.23677664995193481\n", - "Surface training t=5828, loss=0.20252244174480438\n", - "Surface training t=5829, loss=0.1930719092488289\n", - "Surface training t=5830, loss=0.1812382936477661\n", - "Surface training t=5831, loss=0.24405019730329514\n", - "Surface training t=5832, loss=0.24805141240358353\n", - "Surface training t=5833, loss=0.20320583134889603\n", - "Surface training t=5834, 
loss=0.2406870573759079\n", - "Surface training t=5835, loss=0.18721923977136612\n", - "Surface training t=5836, loss=0.24279669672250748\n", - "Surface training t=5837, loss=0.2279002144932747\n", - "Surface training t=5838, loss=0.22156088054180145\n", - "Surface training t=5839, loss=0.2025519162416458\n", - "Surface training t=5840, loss=0.23211894929409027\n", - "Surface training t=5841, loss=0.2430356815457344\n", - "Surface training t=5842, loss=0.2214917689561844\n", - "Surface training t=5843, loss=0.21698939055204391\n", - "Surface training t=5844, loss=0.212008498609066\n", - "Surface training t=5845, loss=0.1825014129281044\n", - "Surface training t=5846, loss=0.198260135948658\n", - "Surface training t=5847, loss=0.2604723125696182\n", - "Surface training t=5848, loss=0.2000882849097252\n", - "Surface training t=5849, loss=0.19602368772029877\n", - "Surface training t=5850, loss=0.2143305167555809\n", - "Surface training t=5851, loss=0.21612969785928726\n", - "Surface training t=5852, loss=0.21605177968740463\n", - "Surface training t=5853, loss=0.19426245242357254\n", - "Surface training t=5854, loss=0.21745772659778595\n", - "Surface training t=5855, loss=0.22859733551740646\n", - "Surface training t=5856, loss=0.23363152891397476\n", - "Surface training t=5857, loss=0.22192441672086716\n", - "Surface training t=5858, loss=0.22056526690721512\n", - "Surface training t=5859, loss=0.20342914760112762\n", - "Surface training t=5860, loss=0.24089495837688446\n", - "Surface training t=5861, loss=0.2215849682688713\n", - "Surface training t=5862, loss=0.23733220994472504\n", - "Surface training t=5863, loss=0.20904089510440826\n", - "Surface training t=5864, loss=0.1949523463845253\n", - "Surface training t=5865, loss=0.2275334596633911\n", - "Surface training t=5866, loss=0.19182147085666656\n", - "Surface training t=5867, loss=0.17702482640743256\n", - "Surface training t=5868, loss=0.22953535616397858\n", - "Surface training t=5869, loss=0.22019324451684952\n", - "Surface training t=5870, loss=0.21143320947885513\n", - "Surface training t=5871, loss=0.16702890023589134\n", - "Surface training t=5872, loss=0.20923428237438202\n", - "Surface training t=5873, loss=0.2148767113685608\n", - "Surface training t=5874, loss=0.24641714990139008\n", - "Surface training t=5875, loss=0.2057771533727646\n", - "Surface training t=5876, loss=0.21813170611858368\n", - "Surface training t=5877, loss=0.20154976099729538\n", - "Surface training t=5878, loss=0.21760380268096924\n", - "Surface training t=5879, loss=0.212163545191288\n", - "Surface training t=5880, loss=0.21061155945062637\n", - "Surface training t=5881, loss=0.18671277910470963\n", - "Surface training t=5882, loss=0.19573494046926498\n", - "Surface training t=5883, loss=0.20265308022499084\n", - "Surface training t=5884, loss=0.19894618541002274\n", - "Surface training t=5885, loss=0.18432192504405975\n", - "Surface training t=5886, loss=0.20718446373939514\n", - "Surface training t=5887, loss=0.1978747546672821\n", - "Surface training t=5888, loss=0.20380157977342606\n", - "Surface training t=5889, loss=0.21617702394723892\n", - "Surface training t=5890, loss=0.20712065696716309\n", - "Surface training t=5891, loss=0.23238670080900192\n", - "Surface training t=5892, loss=0.23626086115837097\n", - "Surface training t=5893, loss=0.23688852041959763\n", - "Surface training t=5894, loss=0.20436476916074753\n", - "Surface training t=5895, loss=0.2080719843506813\n", - "Surface training t=5896, loss=0.21066336333751678\n", - "Surface 
training t=5897, loss=0.22162267565727234\n", - "Surface training t=5898, loss=0.22503101080656052\n", - "Surface training t=5899, loss=0.20504780858755112\n", - "Surface training t=5900, loss=0.2423980012536049\n", - "Surface training t=5901, loss=0.2361539676785469\n", - "Surface training t=5902, loss=0.2547820433974266\n", - "Surface training t=5903, loss=0.23236045241355896\n", - "Surface training t=5904, loss=0.2464630827307701\n", - "Surface training t=5905, loss=0.2636684402823448\n", - "Surface training t=5906, loss=0.22602495551109314\n", - "Surface training t=5907, loss=0.213431254029274\n", - "Surface training t=5908, loss=0.21835853904485703\n", - "Surface training t=5909, loss=0.21528293192386627\n", - "Surface training t=5910, loss=0.20364543050527573\n", - "Surface training t=5911, loss=0.21075048297643661\n", - "Surface training t=5912, loss=0.19746337085962296\n", - "Surface training t=5913, loss=0.2018422707915306\n", - "Surface training t=5914, loss=0.19116342812776566\n", - "Surface training t=5915, loss=0.19917356967926025\n", - "Surface training t=5916, loss=0.2048327401280403\n", - "Surface training t=5917, loss=0.23639246821403503\n", - "Surface training t=5918, loss=0.17846745997667313\n", - "Surface training t=5919, loss=0.20140960812568665\n", - "Surface training t=5920, loss=0.20235729217529297\n", - "Surface training t=5921, loss=0.17983155697584152\n", - "Surface training t=5922, loss=0.18858501315116882\n", - "Surface training t=5923, loss=0.24477872252464294\n", - "Surface training t=5924, loss=0.20495326071977615\n", - "Surface training t=5925, loss=0.19913718849420547\n", - "Surface training t=5926, loss=0.19928117841482162\n", - "Surface training t=5927, loss=0.19102788716554642\n", - "Surface training t=5928, loss=0.19743959605693817\n", - "Surface training t=5929, loss=0.2249089628458023\n", - "Surface training t=5930, loss=0.17489437013864517\n", - "Surface training t=5931, loss=0.21431419998407364\n", - "Surface training t=5932, loss=0.2256087362766266\n", - "Surface training t=5933, loss=0.20973558723926544\n", - "Surface training t=5934, loss=0.20575639605522156\n", - "Surface training t=5935, loss=0.18590712547302246\n", - "Surface training t=5936, loss=0.205512136220932\n", - "Surface training t=5937, loss=0.197860985994339\n", - "Surface training t=5938, loss=0.19906559586524963\n", - "Surface training t=5939, loss=0.20174548774957657\n", - "Surface training t=5940, loss=0.21821367740631104\n", - "Surface training t=5941, loss=0.20357928425073624\n", - "Surface training t=5942, loss=0.21823624521493912\n", - "Surface training t=5943, loss=0.20445464551448822\n", - "Surface training t=5944, loss=0.19860658794641495\n", - "Surface training t=5945, loss=0.23034381866455078\n", - "Surface training t=5946, loss=0.20527391135692596\n", - "Surface training t=5947, loss=0.18689145147800446\n", - "Surface training t=5948, loss=0.19654285162687302\n", - "Surface training t=5949, loss=0.159416351467371\n", - "Surface training t=5950, loss=0.21861419081687927\n", - "Surface training t=5951, loss=0.23171500861644745\n", - "Surface training t=5952, loss=0.22413267940282822\n", - "Surface training t=5953, loss=0.1849636659026146\n", - "Surface training t=5954, loss=0.20860406756401062\n", - "Surface training t=5955, loss=0.20105641335248947\n", - "Surface training t=5956, loss=0.18109101802110672\n", - "Surface training t=5957, loss=0.15256792679429054\n", - "Surface training t=5958, loss=0.19623523205518723\n", - "Surface training t=5959, 
loss=0.20081625878810883\n", - "Surface training t=5960, loss=0.19822819530963898\n", - "Surface training t=5961, loss=0.18463262170553207\n", - "Surface training t=5962, loss=0.20018938928842545\n", - "Surface training t=5963, loss=0.1989734023809433\n", - "Surface training t=5964, loss=0.2366163432598114\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=5965, loss=0.18715417385101318\n", - "Surface training t=5966, loss=0.18970079720020294\n", - "Surface training t=5967, loss=0.20867151021957397\n", - "Surface training t=5968, loss=0.19249319285154343\n", - "Surface training t=5969, loss=0.18866565078496933\n", - "Surface training t=5970, loss=0.18043142557144165\n", - "Surface training t=5971, loss=0.1899040788412094\n", - "Surface training t=5972, loss=0.19732214510440826\n", - "Surface training t=5973, loss=0.20987335592508316\n", - "Surface training t=5974, loss=0.1936940923333168\n", - "Surface training t=5975, loss=0.18381508439779282\n", - "Surface training t=5976, loss=0.20701026171445847\n", - "Surface training t=5977, loss=0.18307068198919296\n", - "Surface training t=5978, loss=0.1885962411761284\n", - "Surface training t=5979, loss=0.1919693499803543\n", - "Surface training t=5980, loss=0.18375688791275024\n", - "Surface training t=5981, loss=0.1890314668416977\n", - "Surface training t=5982, loss=0.20804809033870697\n", - "Surface training t=5983, loss=0.19155900925397873\n", - "Surface training t=5984, loss=0.15697254985570908\n", - "Surface training t=5985, loss=0.16652313619852066\n", - "Surface training t=5986, loss=0.17939885705709457\n", - "Surface training t=5987, loss=0.19632162153720856\n", - "Surface training t=5988, loss=0.21900884062051773\n", - "Surface training t=5989, loss=0.2639082074165344\n", - "Surface training t=5990, loss=0.2094082310795784\n", - "Surface training t=5991, loss=0.21374109387397766\n", - "Surface training t=5992, loss=0.2354900911450386\n", - "Surface training t=5993, loss=0.2108624428510666\n", - "Surface training t=5994, loss=0.23406413197517395\n", - "Surface training t=5995, loss=0.21394554525613785\n", - "Surface training t=5996, loss=0.2129553258419037\n", - "Surface training t=5997, loss=0.18984857201576233\n", - "Surface training t=5998, loss=0.18797137588262558\n", - "Surface training t=5999, loss=0.1899837851524353\n", - "Surface training t=6000, loss=0.19832508265972137\n", - "Surface training t=6001, loss=0.1865461841225624\n", - "Surface training t=6002, loss=0.1901335045695305\n", - "Surface training t=6003, loss=0.21264315396547318\n", - "Surface training t=6004, loss=0.20572783052921295\n", - "Surface training t=6005, loss=0.22472195327281952\n", - "Surface training t=6006, loss=0.19215860962867737\n", - "Surface training t=6007, loss=0.21991105377674103\n", - "Surface training t=6008, loss=0.24481336027383804\n", - "Surface training t=6009, loss=0.21904142200946808\n", - "Surface training t=6010, loss=0.2228039726614952\n", - "Surface training t=6011, loss=0.24296022206544876\n", - "Surface training t=6012, loss=0.19611839950084686\n", - "Surface training t=6013, loss=0.22378388792276382\n", - "Surface training t=6014, loss=0.2114352583885193\n", - "Surface training t=6015, loss=0.18609953671693802\n", - "Surface training t=6016, loss=0.17513614147901535\n", - "Surface training t=6017, loss=0.1735362932085991\n", - "Surface training t=6018, loss=0.19227538257837296\n", - "Surface training t=6019, loss=0.18912921845912933\n", - "Surface training t=6020, 
loss=0.18029196560382843\n", - "Surface training t=6021, loss=0.1800175905227661\n", - "Surface training t=6022, loss=0.17112716287374496\n", - "Surface training t=6023, loss=0.17715828120708466\n", - "Surface training t=6024, loss=0.17394323647022247\n", - "Surface training t=6025, loss=0.17342999577522278\n", - "Surface training t=6026, loss=0.16581465303897858\n", - "Surface training t=6027, loss=0.16484224051237106\n", - "Surface training t=6028, loss=0.1843707263469696\n", - "Surface training t=6029, loss=0.17964500933885574\n", - "Surface training t=6030, loss=0.18024103343486786\n", - "Surface training t=6031, loss=0.18149247765541077\n", - "Surface training t=6032, loss=0.19934312254190445\n", - "Surface training t=6033, loss=0.16371086239814758\n", - "Surface training t=6034, loss=0.19118747115135193\n", - "Surface training t=6035, loss=0.2021971493959427\n", - "Surface training t=6036, loss=0.15982942283153534\n", - "Surface training t=6037, loss=0.16306638717651367\n", - "Surface training t=6038, loss=0.16637106239795685\n", - "Surface training t=6039, loss=0.1570066139101982\n", - "Surface training t=6040, loss=0.15793360024690628\n", - "Surface training t=6041, loss=0.1881508156657219\n", - "Surface training t=6042, loss=0.15959100425243378\n", - "Surface training t=6043, loss=0.14585702121257782\n", - "Surface training t=6044, loss=0.19715115427970886\n", - "Surface training t=6045, loss=0.1699470803141594\n", - "Surface training t=6046, loss=0.17874807864427567\n", - "Surface training t=6047, loss=0.17225198447704315\n", - "Surface training t=6048, loss=0.16481294482946396\n", - "Surface training t=6049, loss=0.16333115845918655\n", - "Surface training t=6050, loss=0.18151257187128067\n", - "Surface training t=6051, loss=0.15541397035121918\n", - "Surface training t=6052, loss=0.17766361683607101\n", - "Surface training t=6053, loss=0.16292842477560043\n", - "Surface training t=6054, loss=0.1604878082871437\n", - "Surface training t=6055, loss=0.18467581272125244\n", - "Surface training t=6056, loss=0.1613655611872673\n", - "Surface training t=6057, loss=0.18563051521778107\n", - "Surface training t=6058, loss=0.18722794950008392\n", - "Surface training t=6059, loss=0.15147150307893753\n", - "Surface training t=6060, loss=0.17135512828826904\n", - "Surface training t=6061, loss=0.17242483794689178\n", - "Surface training t=6062, loss=0.20720868557691574\n", - "Surface training t=6063, loss=0.15071314573287964\n", - "Surface training t=6064, loss=0.179229237139225\n", - "Surface training t=6065, loss=0.18659765273332596\n", - "Surface training t=6066, loss=0.1646461859345436\n", - "Surface training t=6067, loss=0.15615691244602203\n", - "Surface training t=6068, loss=0.1764150708913803\n", - "Surface training t=6069, loss=0.18830852210521698\n", - "Surface training t=6070, loss=0.16247264295816422\n", - "Surface training t=6071, loss=0.16694226115942\n", - "Surface training t=6072, loss=0.1594299077987671\n", - "Surface training t=6073, loss=0.19008053839206696\n", - "Surface training t=6074, loss=0.19994448125362396\n", - "Surface training t=6075, loss=0.16393209993839264\n", - "Surface training t=6076, loss=0.17231790721416473\n", - "Surface training t=6077, loss=0.1891251504421234\n", - "Surface training t=6078, loss=0.20096160471439362\n", - "Surface training t=6079, loss=0.1912306323647499\n", - "Surface training t=6080, loss=0.17138078808784485\n", - "Surface training t=6081, loss=0.22999047487974167\n", - "Surface training t=6082, loss=0.18284162878990173\n", - 
"Surface training t=6083, loss=0.1756485253572464\n", - "Surface training t=6084, loss=0.21336089074611664\n", - "Surface training t=6085, loss=0.19242210686206818\n", - "Surface training t=6086, loss=0.20155901461839676\n", - "Surface training t=6087, loss=0.1980149820446968\n", - "Surface training t=6088, loss=0.20113538205623627\n", - "Surface training t=6089, loss=0.17224488407373428\n", - "Surface training t=6090, loss=0.16425103694200516\n", - "Surface training t=6091, loss=0.15363404154777527\n", - "Surface training t=6092, loss=0.16860225051641464\n", - "Surface training t=6093, loss=0.16284148395061493\n", - "Surface training t=6094, loss=0.16186519712209702\n", - "Surface training t=6095, loss=0.16134454309940338\n", - "Surface training t=6096, loss=0.1707853153347969\n", - "Surface training t=6097, loss=0.17414626479148865\n", - "Surface training t=6098, loss=0.1597272753715515\n", - "Surface training t=6099, loss=0.14549122750759125\n", - "Surface training t=6100, loss=0.15662898123264313\n", - "Surface training t=6101, loss=0.1611928641796112\n", - "Surface training t=6102, loss=0.14328592270612717\n", - "Surface training t=6103, loss=0.15351009368896484\n", - "Surface training t=6104, loss=0.16349314153194427\n", - "Surface training t=6105, loss=0.16887948662042618\n", - "Surface training t=6106, loss=0.1773289069533348\n", - "Surface training t=6107, loss=0.15109039843082428\n", - "Surface training t=6108, loss=0.1524820476770401\n", - "Surface training t=6109, loss=0.15974965691566467\n", - "Surface training t=6110, loss=0.15048197656869888\n", - "Surface training t=6111, loss=0.1412685140967369\n", - "Surface training t=6112, loss=0.15804662555456161\n", - "Surface training t=6113, loss=0.15471616387367249\n", - "Surface training t=6114, loss=0.15389209985733032\n", - "Surface training t=6115, loss=0.13702958077192307\n", - "Surface training t=6116, loss=0.14338675141334534\n", - "Surface training t=6117, loss=0.16241881251335144\n", - "Surface training t=6118, loss=0.17675654590129852\n", - "Surface training t=6119, loss=0.15096253901720047\n", - "Surface training t=6120, loss=0.1549517661333084\n", - "Surface training t=6121, loss=0.13712722808122635\n", - "Surface training t=6122, loss=0.14682067185640335\n", - "Surface training t=6123, loss=0.15664712339639664\n", - "Surface training t=6124, loss=0.18149502575397491\n", - "Surface training t=6125, loss=0.18715478479862213\n", - "Surface training t=6126, loss=0.13342046737670898\n", - "Surface training t=6127, loss=0.15386119484901428\n", - "Surface training t=6128, loss=0.14574100822210312\n", - "Surface training t=6129, loss=0.1634814590215683\n", - "Surface training t=6130, loss=0.15608977526426315\n", - "Surface training t=6131, loss=0.1797185316681862\n", - "Surface training t=6132, loss=0.16630209237337112\n", - "Surface training t=6133, loss=0.17187851667404175\n", - "Surface training t=6134, loss=0.17876850068569183\n", - "Surface training t=6135, loss=0.16445063054561615\n", - "Surface training t=6136, loss=0.15072523802518845\n", - "Surface training t=6137, loss=0.12960007786750793\n", - "Surface training t=6138, loss=0.16233934462070465\n", - "Surface training t=6139, loss=0.17872590571641922\n", - "Surface training t=6140, loss=0.15210683643817902\n", - "Surface training t=6141, loss=0.14653483033180237\n", - "Surface training t=6142, loss=0.16884347051382065\n", - "Surface training t=6143, loss=0.14243555068969727\n", - "Surface training t=6144, loss=0.16416127979755402\n", - "Surface training t=6145, 
loss=0.14030514657497406\n", - "Surface training t=6146, loss=0.1545058861374855\n", - "Surface training t=6147, loss=0.15918942540884018\n", - "Surface training t=6148, loss=0.13944797590374947\n", - "Surface training t=6149, loss=0.14602072536945343\n", - "Surface training t=6150, loss=0.16203810274600983\n", - "Surface training t=6151, loss=0.16175387054681778\n", - "Surface training t=6152, loss=0.15407534688711166\n", - "Surface training t=6153, loss=0.14263132214546204\n", - "Surface training t=6154, loss=0.16691673547029495\n", - "Surface training t=6155, loss=0.16779212653636932\n", - "Surface training t=6156, loss=0.17559605091810226\n", - "Surface training t=6157, loss=0.1485622227191925\n", - "Surface training t=6158, loss=0.14536960422992706\n", - "Surface training t=6159, loss=0.14237608015537262\n", - "Surface training t=6160, loss=0.16344010084867477\n", - "Surface training t=6161, loss=0.12204406410455704\n", - "Surface training t=6162, loss=0.1694810539484024\n", - "Surface training t=6163, loss=0.1389295756816864\n", - "Surface training t=6164, loss=0.1396031379699707\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=6165, loss=0.14169222116470337\n", - "Surface training t=6166, loss=0.13859302550554276\n", - "Surface training t=6167, loss=0.1399461254477501\n", - "Surface training t=6168, loss=0.13400553166866302\n", - "Surface training t=6169, loss=0.12007201462984085\n", - "Surface training t=6170, loss=0.12699957191944122\n", - "Surface training t=6171, loss=0.13865924626588821\n", - "Surface training t=6172, loss=0.13573505729436874\n", - "Surface training t=6173, loss=0.1321863755583763\n", - "Surface training t=6174, loss=0.12254073843359947\n", - "Surface training t=6175, loss=0.1411471739411354\n", - "Surface training t=6176, loss=0.15004030615091324\n", - "Surface training t=6177, loss=0.14053325355052948\n", - "Surface training t=6178, loss=0.11494629085063934\n", - "Surface training t=6179, loss=0.1549944281578064\n", - "Surface training t=6180, loss=0.17885801196098328\n", - "Surface training t=6181, loss=0.16520866751670837\n", - "Surface training t=6182, loss=0.19184453040361404\n", - "Surface training t=6183, loss=0.1764288693666458\n", - "Surface training t=6184, loss=0.1491750255227089\n", - "Surface training t=6185, loss=0.12190937995910645\n", - "Surface training t=6186, loss=0.13083581626415253\n", - "Surface training t=6187, loss=0.14355751127004623\n", - "Surface training t=6188, loss=0.1232408806681633\n", - "Surface training t=6189, loss=0.11710559204220772\n", - "Surface training t=6190, loss=0.12678370997309685\n", - "Surface training t=6191, loss=0.12433802336454391\n", - "Surface training t=6192, loss=0.12141124531626701\n", - "Surface training t=6193, loss=0.120465237647295\n", - "Surface training t=6194, loss=0.14719729870557785\n", - "Surface training t=6195, loss=0.13248514384031296\n", - "Surface training t=6196, loss=0.14888101816177368\n", - "Surface training t=6197, loss=0.12057901918888092\n", - "Surface training t=6198, loss=0.1280447654426098\n", - "Surface training t=6199, loss=0.12297289445996284\n", - "Surface training t=6200, loss=0.1384391114115715\n", - "Surface training t=6201, loss=0.12703319266438484\n", - "Surface training t=6202, loss=0.14413084834814072\n", - "Surface training t=6203, loss=0.12645482644438744\n", - "Surface training t=6204, loss=0.13918715342879295\n", - "Surface training t=6205, loss=0.13407820835709572\n", - "Surface training t=6206, 
loss=0.10718812048435211\n", - "Surface training t=6207, loss=0.1310325786471367\n", - "Surface training t=6208, loss=0.11391391605138779\n", - "Surface training t=6209, loss=0.13467802107334137\n", - "Surface training t=6210, loss=0.12296168133616447\n", - "Surface training t=6211, loss=0.1315583884716034\n", - "Surface training t=6212, loss=0.1219753623008728\n", - "Surface training t=6213, loss=0.11360089480876923\n", - "Surface training t=6214, loss=0.12933935225009918\n", - "Surface training t=6215, loss=0.14710277318954468\n", - "Surface training t=6216, loss=0.12784706056118011\n", - "Surface training t=6217, loss=0.1410617083311081\n", - "Surface training t=6218, loss=0.13382410630583763\n", - "Surface training t=6219, loss=0.13338936120271683\n", - "Surface training t=6220, loss=0.13165835663676262\n", - "Surface training t=6221, loss=0.15424025058746338\n", - "Surface training t=6222, loss=0.13924867659807205\n", - "Surface training t=6223, loss=0.14841028302907944\n", - "Surface training t=6224, loss=0.12907447293400764\n", - "Surface training t=6225, loss=0.12379607930779457\n", - "Surface training t=6226, loss=0.11800078675150871\n", - "Surface training t=6227, loss=0.11561201885342598\n", - "Surface training t=6228, loss=0.12451492622494698\n", - "Surface training t=6229, loss=0.11910755932331085\n", - "Surface training t=6230, loss=0.13059014454483986\n", - "Surface training t=6231, loss=0.12060251086950302\n", - "Surface training t=6232, loss=0.10572578385472298\n", - "Surface training t=6233, loss=0.12091754376888275\n", - "Surface training t=6234, loss=0.11369778215885162\n", - "Surface training t=6235, loss=0.11443190276622772\n", - "Surface training t=6236, loss=0.12055366858839989\n", - "Surface training t=6237, loss=0.11703043803572655\n", - "Surface training t=6238, loss=0.11763092875480652\n", - "Surface training t=6239, loss=0.09965915977954865\n", - "Surface training t=6240, loss=0.11305494979023933\n", - "Surface training t=6241, loss=0.1328577995300293\n", - "Surface training t=6242, loss=0.12026013061404228\n", - "Surface training t=6243, loss=0.1278861090540886\n", - "Surface training t=6244, loss=0.12152859196066856\n", - "Surface training t=6245, loss=0.1286592110991478\n", - "Surface training t=6246, loss=0.12697313353419304\n", - "Surface training t=6247, loss=0.10453671962022781\n", - "Surface training t=6248, loss=0.0933302640914917\n", - "Surface training t=6249, loss=0.11511707678437233\n", - "Surface training t=6250, loss=0.12023818492889404\n", - "Surface training t=6251, loss=0.1240900494158268\n", - "Surface training t=6252, loss=0.13776439428329468\n", - "Surface training t=6253, loss=0.14287276193499565\n", - "Surface training t=6254, loss=0.17350435256958008\n", - "Surface training t=6255, loss=0.13657470792531967\n", - "Surface training t=6256, loss=0.13909109309315681\n", - "Surface training t=6257, loss=0.1252162680029869\n", - "Surface training t=6258, loss=0.1272376999258995\n", - "Surface training t=6259, loss=0.11145465448498726\n", - "Surface training t=6260, loss=0.10143030807375908\n", - "Surface training t=6261, loss=0.1240747906267643\n", - "Surface training t=6262, loss=0.11061912402510643\n", - "Surface training t=6263, loss=0.1377483829855919\n", - "Surface training t=6264, loss=0.13634102419018745\n", - "Surface training t=6265, loss=0.12304287403821945\n", - "Surface training t=6266, loss=0.12979355454444885\n", - "Surface training t=6267, loss=0.1109202690422535\n", - "Surface training t=6268, loss=0.13970839977264404\n", - 
"Surface training t=6269, loss=0.11747585982084274\n", - "Surface training t=6270, loss=0.10327755659818649\n", - "Surface training t=6271, loss=0.10389763116836548\n", - "Surface training t=6272, loss=0.09661239385604858\n", - "Surface training t=6273, loss=0.11208087205886841\n", - "Surface training t=6274, loss=0.0981617383658886\n", - "Surface training t=6275, loss=0.10385242849588394\n", - "Surface training t=6276, loss=0.09203502908349037\n", - "Surface training t=6277, loss=0.10476534441113472\n", - "Surface training t=6278, loss=0.11347486451268196\n", - "Surface training t=6279, loss=0.11326542124152184\n", - "Surface training t=6280, loss=0.10453790798783302\n", - "Surface training t=6281, loss=0.09437520802021027\n", - "Surface training t=6282, loss=0.0863831415772438\n", - "Surface training t=6283, loss=0.08988593146204948\n", - "Surface training t=6284, loss=0.09068583324551582\n", - "Surface training t=6285, loss=0.10034262016415596\n", - "Surface training t=6286, loss=0.09455830976366997\n", - "Surface training t=6287, loss=0.12465077638626099\n", - "Surface training t=6288, loss=0.12496170774102211\n", - "Surface training t=6289, loss=0.11900858581066132\n", - "Surface training t=6290, loss=0.11423804238438606\n", - "Surface training t=6291, loss=0.10650434345006943\n", - "Surface training t=6292, loss=0.11141633242368698\n", - "Surface training t=6293, loss=0.12299909442663193\n", - "Surface training t=6294, loss=0.1182461678981781\n", - "Surface training t=6295, loss=0.11447746306657791\n", - "Surface training t=6296, loss=0.1317930370569229\n", - "Surface training t=6297, loss=0.11760444939136505\n", - "Surface training t=6298, loss=0.12196676433086395\n", - "Surface training t=6299, loss=0.11334113776683807\n", - "Surface training t=6300, loss=0.10725197196006775\n", - "Surface training t=6301, loss=0.09396778792142868\n", - "Surface training t=6302, loss=0.0939830057322979\n", - "Surface training t=6303, loss=0.09026018157601357\n", - "Surface training t=6304, loss=0.08834217488765717\n", - "Surface training t=6305, loss=0.10659286007285118\n", - "Surface training t=6306, loss=0.12100407108664513\n", - "Surface training t=6307, loss=0.12111103534698486\n", - "Surface training t=6308, loss=0.10403259843587875\n", - "Surface training t=6309, loss=0.10635790228843689\n", - "Surface training t=6310, loss=0.10130785033106804\n", - "Surface training t=6311, loss=0.09622466191649437\n", - "Surface training t=6312, loss=0.10678102448582649\n", - "Surface training t=6313, loss=0.09554547071456909\n", - "Surface training t=6314, loss=0.10622081905603409\n", - "Surface training t=6315, loss=0.07916982844471931\n", - "Surface training t=6316, loss=0.07793348282575607\n", - "Surface training t=6317, loss=0.09242960438132286\n", - "Surface training t=6318, loss=0.09128560498356819\n", - "Surface training t=6319, loss=0.10068169608712196\n", - "Surface training t=6320, loss=0.07660332322120667\n", - "Surface training t=6321, loss=0.07246863842010498\n", - "Surface training t=6322, loss=0.07918097078800201\n", - "Surface training t=6323, loss=0.07605034857988358\n", - "Surface training t=6324, loss=0.0968756414949894\n", - "Surface training t=6325, loss=0.09065381810069084\n", - "Surface training t=6326, loss=0.08447947353124619\n", - "Surface training t=6327, loss=0.07597498595714569\n", - "Surface training t=6328, loss=0.0906515009701252\n", - "Surface training t=6329, loss=0.08572664484381676\n", - "Surface training t=6330, loss=0.06567506492137909\n", - "Surface training t=6331, 
loss=0.08673300594091415\n", - "Surface training t=6332, loss=0.10262828320264816\n", - "Surface training t=6333, loss=0.09752796962857246\n", - "Surface training t=6334, loss=0.1277221292257309\n", - "Surface training t=6335, loss=0.09834958240389824\n", - "Surface training t=6336, loss=0.11249848082661629\n", - "Surface training t=6337, loss=0.1050686165690422\n", - "Surface training t=6338, loss=0.11630981415510178\n", - "Surface training t=6339, loss=0.11409225314855576\n", - "Surface training t=6340, loss=0.1043352372944355\n", - "Surface training t=6341, loss=0.12699782848358154\n", - "Surface training t=6342, loss=0.10029268264770508\n", - "Surface training t=6343, loss=0.11047562584280968\n", - "Surface training t=6344, loss=0.09878901392221451\n", - "Surface training t=6345, loss=0.10251739248633385\n", - "Surface training t=6346, loss=0.07876493781805038\n", - "Surface training t=6347, loss=0.09130226448178291\n", - "Surface training t=6348, loss=0.07238997891545296\n", - "Surface training t=6349, loss=0.08549825474619865\n", - "Surface training t=6350, loss=0.09911378845572472\n", - "Surface training t=6351, loss=0.10694431141018867\n", - "Surface training t=6352, loss=0.08157498762011528\n", - "Surface training t=6353, loss=0.08176742121577263\n", - "Surface training t=6354, loss=0.08766279369592667\n", - "Surface training t=6355, loss=0.10712090134620667\n", - "Surface training t=6356, loss=0.10354938730597496\n", - "Surface training t=6357, loss=0.08696227893233299\n", - "Surface training t=6358, loss=0.11288385838270187\n", - "Surface training t=6359, loss=0.10236290842294693\n", - "Surface training t=6360, loss=0.08847342059016228\n", - "Surface training t=6361, loss=0.10741666331887245\n", - "Surface training t=6362, loss=0.09909989312291145\n", - "Surface training t=6363, loss=0.09912761300802231\n", - "Surface training t=6364, loss=0.09780840575695038\n", - "Surface training t=6365, loss=0.07055509462952614\n", - "Surface training t=6366, loss=0.07503162696957588\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=6367, loss=0.0787622518837452\n", - "Surface training t=6368, loss=0.06521524488925934\n", - "Surface training t=6369, loss=0.07176929339766502\n", - "Surface training t=6370, loss=0.07974477857351303\n", - "Surface training t=6371, loss=0.07878698408603668\n", - "Surface training t=6372, loss=0.09860457479953766\n", - "Surface training t=6373, loss=0.10441012680530548\n", - "Surface training t=6374, loss=0.11296579614281654\n", - "Surface training t=6375, loss=0.12415020167827606\n", - "Surface training t=6376, loss=0.09233628585934639\n", - "Surface training t=6377, loss=0.10202889889478683\n", - "Surface training t=6378, loss=0.09441499039530754\n", - "Surface training t=6379, loss=0.08965088799595833\n", - "Surface training t=6380, loss=0.09817822650074959\n", - "Surface training t=6381, loss=0.08105646818876266\n", - "Surface training t=6382, loss=0.07835578918457031\n", - "Surface training t=6383, loss=0.09217649698257446\n", - "Surface training t=6384, loss=0.10457371547818184\n", - "Surface training t=6385, loss=0.08166946284472942\n", - "Surface training t=6386, loss=0.08332228660583496\n", - "Surface training t=6387, loss=0.06654095277190208\n", - "Surface training t=6388, loss=0.0936763733625412\n", - "Surface training t=6389, loss=0.07569503411650658\n", - "Surface training t=6390, loss=0.07963017001748085\n", - "Surface training t=6391, loss=0.05592386610805988\n", - "Surface training t=6392, 
- [notebook output trimmed: ~1,300 repeated "Surface training t=…, loss=…" log lines covering iterations t=6393 through t=7688, split across several stdout stream objects; the loss declines overall from roughly 0.06–0.18 to 0.03–0.05]
training t=7689, loss=0.037357211112976074\n", - "Surface training t=7690, loss=0.029099988751113415\n", - "Surface training t=7691, loss=0.0362990852445364\n", - "Surface training t=7692, loss=0.03998354449868202\n", - "Surface training t=7693, loss=0.03975874185562134\n", - "Surface training t=7694, loss=0.03376076463609934\n", - "Surface training t=7695, loss=0.05231565609574318\n", - "Surface training t=7696, loss=0.04335682652890682\n", - "Surface training t=7697, loss=0.04543976113200188\n", - "Surface training t=7698, loss=0.060506269335746765\n", - "Surface training t=7699, loss=0.05523073486983776\n", - "Surface training t=7700, loss=0.06465707905590534\n", - "Surface training t=7701, loss=0.051183007657527924\n", - "Surface training t=7702, loss=0.06597473844885826\n", - "Surface training t=7703, loss=0.06173296645283699\n", - "Surface training t=7704, loss=0.0578080452978611\n", - "Surface training t=7705, loss=0.04939994402229786\n", - "Surface training t=7706, loss=0.040398554876446724\n", - "Surface training t=7707, loss=0.041449058800935745\n", - "Surface training t=7708, loss=0.04366137832403183\n", - "Surface training t=7709, loss=0.05281545780599117\n", - "Surface training t=7710, loss=0.08812733367085457\n", - "Surface training t=7711, loss=0.045053512789309025\n", - "Surface training t=7712, loss=0.04149378091096878\n", - "Surface training t=7713, loss=0.04872930981218815\n", - "Surface training t=7714, loss=0.04767608176916838\n", - "Surface training t=7715, loss=0.05893024057149887\n", - "Surface training t=7716, loss=0.051919156685471535\n", - "Surface training t=7717, loss=0.0483828354626894\n", - "Surface training t=7718, loss=0.03684980422258377\n", - "Surface training t=7719, loss=0.04917503893375397\n", - "Surface training t=7720, loss=0.03927428647875786\n", - "Surface training t=7721, loss=0.03527433052659035\n", - "Surface training t=7722, loss=0.036097628995776176\n", - "Surface training t=7723, loss=0.03883451968431473\n", - "Surface training t=7724, loss=0.04040489159524441\n", - "Surface training t=7725, loss=0.03439766447991133\n", - "Surface training t=7726, loss=0.03805740363895893\n", - "Surface training t=7727, loss=0.054508211091160774\n", - "Surface training t=7728, loss=0.07361910864710808\n", - "Surface training t=7729, loss=0.06233218498528004\n", - "Surface training t=7730, loss=0.061517948284745216\n", - "Surface training t=7731, loss=0.06568977795541286\n", - "Surface training t=7732, loss=0.06652531400322914\n", - "Surface training t=7733, loss=0.09898637980222702\n", - "Surface training t=7734, loss=0.06466583721339703\n", - "Surface training t=7735, loss=0.04551692493259907\n", - "Surface training t=7736, loss=0.04622129164636135\n", - "Surface training t=7737, loss=0.04884251952171326\n", - "Surface training t=7738, loss=0.04352976381778717\n", - "Surface training t=7739, loss=0.050022609531879425\n", - "Surface training t=7740, loss=0.04949074424803257\n", - "Surface training t=7741, loss=0.031088512390851974\n", - "Surface training t=7742, loss=0.0324078481644392\n", - "Surface training t=7743, loss=0.02790275402367115\n", - "Surface training t=7744, loss=0.028135032393038273\n", - "Surface training t=7745, loss=0.030805516056716442\n", - "Surface training t=7746, loss=0.03333138860762119\n", - "Surface training t=7747, loss=0.03754897788167\n", - "Surface training t=7748, loss=0.03892248123884201\n", - "Surface training t=7749, loss=0.040298450738191605\n", - "Surface training t=7750, loss=0.044857291504740715\n", - "Surface training 
t=7751, loss=0.04247133433818817\n", - "Surface training t=7752, loss=0.041484703309834\n", - "Surface training t=7753, loss=0.034855274483561516\n", - "Surface training t=7754, loss=0.03822803869843483\n", - "Surface training t=7755, loss=0.03050841111689806\n", - "Surface training t=7756, loss=0.04967478848993778\n", - "Surface training t=7757, loss=0.049545109272003174\n", - "Surface training t=7758, loss=0.04125973582267761\n", - "Surface training t=7759, loss=0.045114658772945404\n", - "Surface training t=7760, loss=0.04228479601442814\n", - "Surface training t=7761, loss=0.03768603503704071\n", - "Surface training t=7762, loss=0.04471580870449543\n", - "Surface training t=7763, loss=0.03816244564950466\n", - "Surface training t=7764, loss=0.03462354000657797\n", - "Surface training t=7765, loss=0.029556256718933582\n", - "Surface training t=7766, loss=0.03559386543929577\n", - "Surface training t=7767, loss=0.0255454545840621\n", - "Surface training t=7768, loss=0.034752015955746174\n", - "Surface training t=7769, loss=0.02633629646152258\n", - "Surface training t=7770, loss=0.02758117113262415\n", - "Surface training t=7771, loss=0.03540210798382759\n", - "Surface training t=7772, loss=0.034584072418510914\n", - "Surface training t=7773, loss=0.030998900532722473\n", - "Surface training t=7774, loss=0.03467760980129242\n", - "Surface training t=7775, loss=0.03533061034977436\n", - "Surface training t=7776, loss=0.04142920859158039\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=7777, loss=0.044762369245290756\n", - "Surface training t=7778, loss=0.04742545820772648\n", - "Surface training t=7779, loss=0.03678876906633377\n", - "Surface training t=7780, loss=0.0387073690071702\n", - "Surface training t=7781, loss=0.04501793719828129\n", - "Surface training t=7782, loss=0.041009822860360146\n", - "Surface training t=7783, loss=0.03657203633338213\n", - "Surface training t=7784, loss=0.03992369957268238\n", - "Surface training t=7785, loss=0.05063782446086407\n", - "Surface training t=7786, loss=0.04253164678812027\n", - "Surface training t=7787, loss=0.03677576594054699\n", - "Surface training t=7788, loss=0.039170049130916595\n", - "Surface training t=7789, loss=0.04646126180887222\n", - "Surface training t=7790, loss=0.05096246302127838\n", - "Surface training t=7791, loss=0.051091439090669155\n", - "Surface training t=7792, loss=0.08685633167624474\n", - "Surface training t=7793, loss=0.06547893956303596\n", - "Surface training t=7794, loss=0.054038478061556816\n", - "Surface training t=7795, loss=0.06512981653213501\n", - "Surface training t=7796, loss=0.06663591600954533\n", - "Surface training t=7797, loss=0.08440656960010529\n", - "Surface training t=7798, loss=0.07284712046384811\n", - "Surface training t=7799, loss=0.06963863037526608\n", - "Surface training t=7800, loss=0.08610263466835022\n", - "Surface training t=7801, loss=0.0931207463145256\n", - "Surface training t=7802, loss=0.06998176500201225\n", - "Surface training t=7803, loss=0.12765943259000778\n", - "Surface training t=7804, loss=0.0952102579176426\n", - "Surface training t=7805, loss=0.07258912175893784\n", - "Surface training t=7806, loss=0.06613869778811932\n", - "Surface training t=7807, loss=0.09009860083460808\n", - "Surface training t=7808, loss=0.05564294941723347\n", - "Surface training t=7809, loss=0.06406892091035843\n", - "Surface training t=7810, loss=0.055453745648264885\n", - "Surface training t=7811, loss=0.09631957113742828\n", - "Surface training 
t=7812, loss=0.08933739736676216\n", - "Surface training t=7813, loss=0.06229339726269245\n", - "Surface training t=7814, loss=0.09882649034261703\n", - "Surface training t=7815, loss=0.0658393781632185\n", - "Surface training t=7816, loss=0.07388437539339066\n", - "Surface training t=7817, loss=0.056618787348270416\n", - "Surface training t=7818, loss=0.05585465393960476\n", - "Surface training t=7819, loss=0.043544407933950424\n", - "Surface training t=7820, loss=0.03411005064845085\n", - "Surface training t=7821, loss=0.0387142114341259\n", - "Surface training t=7822, loss=0.06086924485862255\n", - "Surface training t=7823, loss=0.05028999038040638\n", - "Surface training t=7824, loss=0.040401021018624306\n", - "Surface training t=7825, loss=0.06797118671238422\n", - "Surface training t=7826, loss=0.04317554831504822\n", - "Surface training t=7827, loss=0.06348823383450508\n", - "Surface training t=7828, loss=0.049864742904901505\n", - "Surface training t=7829, loss=0.05255609564483166\n", - "Surface training t=7830, loss=0.07016757130622864\n", - "Surface training t=7831, loss=0.08348225057125092\n", - "Surface training t=7832, loss=0.06690598279237747\n", - "Surface training t=7833, loss=0.08718465082347393\n", - "Surface training t=7834, loss=0.06679616309702396\n", - "Surface training t=7835, loss=0.07466511055827141\n", - "Surface training t=7836, loss=0.05887812003493309\n", - "Surface training t=7837, loss=0.07110176049172878\n", - "Surface training t=7838, loss=0.07960842177271843\n", - "Surface training t=7839, loss=0.08446424081921577\n", - "Surface training t=7840, loss=0.06834596768021584\n", - "Surface training t=7841, loss=0.06405268795788288\n", - "Surface training t=7842, loss=0.09420093521475792\n", - "Surface training t=7843, loss=0.06105892360210419\n", - "Surface training t=7844, loss=0.05174201354384422\n", - "Surface training t=7845, loss=0.052678730338811874\n", - "Surface training t=7846, loss=0.04173537343740463\n", - "Surface training t=7847, loss=0.042495790868997574\n", - "Surface training t=7848, loss=0.06347129866480827\n", - "Surface training t=7849, loss=0.055803149938583374\n", - "Surface training t=7850, loss=0.048223281279206276\n", - "Surface training t=7851, loss=0.04980096220970154\n", - "Surface training t=7852, loss=0.06102936342358589\n", - "Surface training t=7853, loss=0.0663658045232296\n", - "Surface training t=7854, loss=0.08218972757458687\n", - "Surface training t=7855, loss=0.05602940171957016\n", - "Surface training t=7856, loss=0.05152961052954197\n", - "Surface training t=7857, loss=0.06640687957406044\n", - "Surface training t=7858, loss=0.09412596747279167\n", - "Surface training t=7859, loss=0.056007279083132744\n", - "Surface training t=7860, loss=0.057521214708685875\n", - "Surface training t=7861, loss=0.06396911479532719\n", - "Surface training t=7862, loss=0.11131995543837547\n", - "Surface training t=7863, loss=0.08593726716935635\n", - "Surface training t=7864, loss=0.10917647927999496\n", - "Surface training t=7865, loss=0.09680841490626335\n", - "Surface training t=7866, loss=0.08098837733268738\n", - "Surface training t=7867, loss=0.10175152495503426\n", - "Surface training t=7868, loss=0.07912977412343025\n", - "Surface training t=7869, loss=0.06585841998457909\n", - "Surface training t=7870, loss=0.06660236418247223\n", - "Surface training t=7871, loss=0.06339343637228012\n", - "Surface training t=7872, loss=0.04456036165356636\n", - "Surface training t=7873, loss=0.03717319294810295\n", - "Surface training t=7874, 
loss=0.04177098162472248\n", - "Surface training t=7875, loss=0.03324656840413809\n", - "Surface training t=7876, loss=0.030641691759228706\n", - "Surface training t=7877, loss=0.02800305001437664\n", - "Surface training t=7878, loss=0.030504979193210602\n", - "Surface training t=7879, loss=0.03621141240000725\n", - "Surface training t=7880, loss=0.04153487645089626\n", - "Surface training t=7881, loss=0.06152629479765892\n", - "Surface training t=7882, loss=0.057503633201122284\n", - "Surface training t=7883, loss=0.05350308492779732\n", - "Surface training t=7884, loss=0.046292541548609734\n", - "Surface training t=7885, loss=0.05453076213598251\n", - "Surface training t=7886, loss=0.050278808921575546\n", - "Surface training t=7887, loss=0.05043049156665802\n", - "Surface training t=7888, loss=0.05044855736196041\n", - "Surface training t=7889, loss=0.0454441886395216\n", - "Surface training t=7890, loss=0.029965341091156006\n", - "Surface training t=7891, loss=0.04263542592525482\n", - "Surface training t=7892, loss=0.03765209764242172\n", - "Surface training t=7893, loss=0.0321496706455946\n", - "Surface training t=7894, loss=0.04159492440521717\n", - "Surface training t=7895, loss=0.04311164282262325\n", - "Surface training t=7896, loss=0.07431194558739662\n", - "Surface training t=7897, loss=0.04432203061878681\n", - "Surface training t=7898, loss=0.052381863817572594\n", - "Surface training t=7899, loss=0.046128084883093834\n", - "Surface training t=7900, loss=0.04217090085148811\n", - "Surface training t=7901, loss=0.03787138685584068\n", - "Surface training t=7902, loss=0.032065775245428085\n", - "Surface training t=7903, loss=0.02772896084934473\n", - "Surface training t=7904, loss=0.03172002546489239\n", - "Surface training t=7905, loss=0.03892369195818901\n", - "Surface training t=7906, loss=0.03312748670578003\n", - "Surface training t=7907, loss=0.030060192570090294\n", - "Surface training t=7908, loss=0.032175276428461075\n", - "Surface training t=7909, loss=0.025472842156887054\n", - "Surface training t=7910, loss=0.036776360124349594\n", - "Surface training t=7911, loss=0.02769693173468113\n", - "Surface training t=7912, loss=0.022818829864263535\n", - "Surface training t=7913, loss=0.0277189239859581\n", - "Surface training t=7914, loss=0.03415617719292641\n", - "Surface training t=7915, loss=0.03247552178800106\n", - "Surface training t=7916, loss=0.034227728843688965\n", - "Surface training t=7917, loss=0.03202946484088898\n", - "Surface training t=7918, loss=0.028375922702252865\n", - "Surface training t=7919, loss=0.03416351415216923\n", - "Surface training t=7920, loss=0.03384880814701319\n", - "Surface training t=7921, loss=0.028413357213139534\n", - "Surface training t=7922, loss=0.027423083782196045\n", - "Surface training t=7923, loss=0.03259052708745003\n", - "Surface training t=7924, loss=0.02904126327484846\n", - "Surface training t=7925, loss=0.027559039182960987\n", - "Surface training t=7926, loss=0.02714661695063114\n", - "Surface training t=7927, loss=0.03146187402307987\n", - "Surface training t=7928, loss=0.023318467661738396\n", - "Surface training t=7929, loss=0.024432464502751827\n", - "Surface training t=7930, loss=0.03390817157924175\n", - "Surface training t=7931, loss=0.02862958237528801\n", - "Surface training t=7932, loss=0.02953080739825964\n", - "Surface training t=7933, loss=0.029891262762248516\n", - "Surface training t=7934, loss=0.026202762499451637\n", - "Surface training t=7935, loss=0.027217188850045204\n", - "Surface training t=7936, 
loss=0.03432360850274563\n", - "Surface training t=7937, loss=0.03397503308951855\n", - "Surface training t=7938, loss=0.04454224556684494\n", - "Surface training t=7939, loss=0.040551941841840744\n", - "Surface training t=7940, loss=0.03259950876235962\n", - "Surface training t=7941, loss=0.04965721070766449\n", - "Surface training t=7942, loss=0.045989775098860264\n", - "Surface training t=7943, loss=0.04399549029767513\n", - "Surface training t=7944, loss=0.04249139316380024\n", - "Surface training t=7945, loss=0.03511985018849373\n", - "Surface training t=7946, loss=0.0270694587379694\n", - "Surface training t=7947, loss=0.05228930152952671\n", - "Surface training t=7948, loss=0.062398700043559074\n", - "Surface training t=7949, loss=0.04786863923072815\n", - "Surface training t=7950, loss=0.048993926495313644\n", - "Surface training t=7951, loss=0.0409467164427042\n", - "Surface training t=7952, loss=0.0494582187384367\n", - "Surface training t=7953, loss=0.039420073851943016\n", - "Surface training t=7954, loss=0.03804119676351547\n", - "Surface training t=7955, loss=0.04022488184273243\n", - "Surface training t=7956, loss=0.03199326619505882\n", - "Surface training t=7957, loss=0.03425561264157295\n", - "Surface training t=7958, loss=0.030557414516806602\n", - "Surface training t=7959, loss=0.036431146785616875\n", - "Surface training t=7960, loss=0.03235068265348673\n", - "Surface training t=7961, loss=0.036528317257761955\n", - "Surface training t=7962, loss=0.03467901796102524\n", - "Surface training t=7963, loss=0.0364993866533041\n", - "Surface training t=7964, loss=0.047278719022870064\n", - "Surface training t=7965, loss=0.031996769830584526\n", - "Surface training t=7966, loss=0.04913174919784069\n", - "Surface training t=7967, loss=0.04456733725965023\n", - "Surface training t=7968, loss=0.03241548966616392\n", - "Surface training t=7969, loss=0.04564506560564041\n", - "Surface training t=7970, loss=0.03386913053691387\n", - "Surface training t=7971, loss=0.040433911606669426\n", - "Surface training t=7972, loss=0.03873836062848568\n", - "Surface training t=7973, loss=0.034350791946053505\n", - "Surface training t=7974, loss=0.03300510346889496\n", - "Surface training t=7975, loss=0.03815378248691559\n", - "Surface training t=7976, loss=0.042170461267232895\n", - "Surface training t=7977, loss=0.04186094366014004\n", - "Surface training t=7978, loss=0.05559281446039677\n", - "Surface training t=7979, loss=0.07859997823834419\n", - "Surface training t=7980, loss=0.05774959363043308\n", - "Surface training t=7981, loss=0.051230983808636665\n", - "Surface training t=7982, loss=0.05141281522810459\n", - "Surface training t=7983, loss=0.04786410555243492\n", - "Surface training t=7984, loss=0.046678612008690834\n", - "Surface training t=7985, loss=0.051049040630459785\n", - "Surface training t=7986, loss=0.0531237181276083\n", - "Surface training t=7987, loss=0.05079345591366291\n", - "Surface training t=7988, loss=0.06396791711449623\n", - "Surface training t=7989, loss=0.07324148342013359\n", - "Surface training t=7990, loss=0.0625145323574543\n", - "Surface training t=7991, loss=0.05921827629208565\n", - "Surface training t=7992, loss=0.05260344035923481\n", - "Surface training t=7993, loss=0.0619766004383564\n", - "Surface training t=7994, loss=0.04767927527427673\n", - "Surface training t=7995, loss=0.03972122259438038\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=7996, loss=0.046960316598415375\n", - "Surface training 
t=7997, loss=0.044606005772948265\n", - "Surface training t=7998, loss=0.05748092383146286\n", - "Surface training t=7999, loss=0.05876355990767479\n", - "Surface training t=8000, loss=0.05244305171072483\n", - "Surface training t=8001, loss=0.066917534917593\n", - "Surface training t=8002, loss=0.056591471657156944\n", - "Surface training t=8003, loss=0.052410783246159554\n", - "Surface training t=8004, loss=0.04822654463350773\n", - "Surface training t=8005, loss=0.0535406693816185\n", - "Surface training t=8006, loss=0.0480313915759325\n", - "Surface training t=8007, loss=0.04356429539620876\n", - "Surface training t=8008, loss=0.07724859192967415\n", - "Surface training t=8009, loss=0.06835866905748844\n", - "Surface training t=8010, loss=0.06696313433349133\n", - "Surface training t=8011, loss=0.05365658365190029\n", - "Surface training t=8012, loss=0.06329476088285446\n", - "Surface training t=8013, loss=0.06599670276045799\n", - "Surface training t=8014, loss=0.051139770075678825\n", - "Surface training t=8015, loss=0.04681192897260189\n", - "Surface training t=8016, loss=0.04207531549036503\n", - "Surface training t=8017, loss=0.06423193216323853\n", - "Surface training t=8018, loss=0.037567080929875374\n", - "Surface training t=8019, loss=0.03291802853345871\n", - "Surface training t=8020, loss=0.0323818139731884\n", - "Surface training t=8021, loss=0.03281418699771166\n", - "Surface training t=8022, loss=0.029949942603707314\n", - "Surface training t=8023, loss=0.053432734683156013\n", - "Surface training t=8024, loss=0.05946579948067665\n", - "Surface training t=8025, loss=0.0766550712287426\n", - "Surface training t=8026, loss=0.05900576710700989\n", - "Surface training t=8027, loss=0.0599431786686182\n", - "Surface training t=8028, loss=0.038824426010251045\n", - "Surface training t=8029, loss=0.048819306306540966\n", - "Surface training t=8030, loss=0.057994117960333824\n", - "Surface training t=8031, loss=0.06769570335745811\n", - "Surface training t=8032, loss=0.060510050505399704\n", - "Surface training t=8033, loss=0.04338706284761429\n", - "Surface training t=8034, loss=0.041200061328709126\n", - "Surface training t=8035, loss=0.038646871224045753\n", - "Surface training t=8036, loss=0.04528047889471054\n", - "Surface training t=8037, loss=0.03985291346907616\n", - "Surface training t=8038, loss=0.0348511915653944\n", - "Surface training t=8039, loss=0.04266376979649067\n", - "Surface training t=8040, loss=0.041563187725842\n", - "Surface training t=8041, loss=0.04743294417858124\n", - "Surface training t=8042, loss=0.05247548036277294\n", - "Surface training t=8043, loss=0.08906527981162071\n", - "Surface training t=8044, loss=0.06625337339937687\n", - "Surface training t=8045, loss=0.05950721725821495\n", - "Surface training t=8046, loss=0.09865357726812363\n", - "Surface training t=8047, loss=0.06875131651759148\n", - "Surface training t=8048, loss=0.05947815626859665\n", - "Surface training t=8049, loss=0.06133977137506008\n", - "Surface training t=8050, loss=0.07875197380781174\n", - "Surface training t=8051, loss=0.0554162971675396\n", - "Surface training t=8052, loss=0.05148315615952015\n", - "Surface training t=8053, loss=0.1051216684281826\n", - "Surface training t=8054, loss=0.06054171547293663\n", - "Surface training t=8055, loss=0.07218816876411438\n", - "Surface training t=8056, loss=0.04793709143996239\n", - "Surface training t=8057, loss=0.05391297489404678\n", - "Surface training t=8058, loss=0.041096385568380356\n", - "Surface training t=8059, 
loss=0.06381458416581154\n", - "Surface training t=8060, loss=0.03942998684942722\n", - "Surface training t=8061, loss=0.04822292272001505\n", - "Surface training t=8062, loss=0.04987248592078686\n", - "Surface training t=8063, loss=0.04977233335375786\n", - "Surface training t=8064, loss=0.04943549446761608\n", - "Surface training t=8065, loss=0.05246012471616268\n", - "Surface training t=8066, loss=0.04710688814520836\n", - "Surface training t=8067, loss=0.09130764380097389\n", - "Surface training t=8068, loss=0.0691443495452404\n", - "Surface training t=8069, loss=0.0719300638884306\n", - "Surface training t=8070, loss=0.1155046857893467\n", - "Surface training t=8071, loss=0.0863608717918396\n", - "Surface training t=8072, loss=0.1290760338306427\n", - "Surface training t=8073, loss=0.0770078394562006\n", - "Surface training t=8074, loss=0.06481544114649296\n", - "Surface training t=8075, loss=0.05543074384331703\n", - "Surface training t=8076, loss=0.06603675335645676\n", - "Surface training t=8077, loss=0.0395981278270483\n", - "Surface training t=8078, loss=0.04835228994488716\n", - "Surface training t=8079, loss=0.031959653832018375\n", - "Surface training t=8080, loss=0.02995497640222311\n", - "Surface training t=8081, loss=0.05588173680007458\n", - "Surface training t=8082, loss=0.04186180792748928\n", - "Surface training t=8083, loss=0.048685019835829735\n", - "Surface training t=8084, loss=0.0433448851108551\n", - "Surface training t=8085, loss=0.042452892288565636\n", - "Surface training t=8086, loss=0.04365427419543266\n", - "Surface training t=8087, loss=0.029289454221725464\n", - "Surface training t=8088, loss=0.03348626662045717\n", - "Surface training t=8089, loss=0.03793897107243538\n", - "Surface training t=8090, loss=0.02468116581439972\n", - "Surface training t=8091, loss=0.031907436437904835\n", - "Surface training t=8092, loss=0.032317714765667915\n", - "Surface training t=8093, loss=0.026563781313598156\n", - "Surface training t=8094, loss=0.022727075032889843\n", - "Surface training t=8095, loss=0.03579657804220915\n", - "Surface training t=8096, loss=0.04424657113850117\n", - "Surface training t=8097, loss=0.037659620866179466\n", - "Surface training t=8098, loss=0.03282981179654598\n", - "Surface training t=8099, loss=0.03481563739478588\n", - "Surface training t=8100, loss=0.029408574104309082\n", - "Surface training t=8101, loss=0.02693677321076393\n", - "Surface training t=8102, loss=0.030976291745901108\n", - "Surface training t=8103, loss=0.02750521060079336\n", - "Surface training t=8104, loss=0.025818735361099243\n", - "Surface training t=8105, loss=0.02655072696506977\n", - "Surface training t=8106, loss=0.028271662071347237\n", - "Surface training t=8107, loss=0.028822663240134716\n", - "Surface training t=8108, loss=0.02427054289728403\n", - "Surface training t=8109, loss=0.03222198970615864\n", - "Surface training t=8110, loss=0.04110930021852255\n", - "Surface training t=8111, loss=0.06116233766078949\n", - "Surface training t=8112, loss=0.045671362429857254\n", - "Surface training t=8113, loss=0.04673242010176182\n", - "Surface training t=8114, loss=0.06310956366360188\n", - "Surface training t=8115, loss=0.05656692199409008\n", - "Surface training t=8116, loss=0.04347559995949268\n", - "Surface training t=8117, loss=0.03094171267002821\n", - "Surface training t=8118, loss=0.037192732095718384\n", - "Surface training t=8119, loss=0.02900402620434761\n", - "Surface training t=8120, loss=0.031122020445764065\n", - "Surface training t=8121, 
loss=0.03804511111229658\n", - "Surface training t=8122, loss=0.03477000817656517\n", - "Surface training t=8123, loss=0.03067579586058855\n", - "Surface training t=8124, loss=0.02995970007032156\n", - "Surface training t=8125, loss=0.02678900584578514\n", - "Surface training t=8126, loss=0.028556360863149166\n", - "Surface training t=8127, loss=0.03184475935995579\n", - "Surface training t=8128, loss=0.031061798334121704\n", - "Surface training t=8129, loss=0.024627897888422012\n", - "Surface training t=8130, loss=0.025856911204755306\n", - "Surface training t=8131, loss=0.03638261556625366\n", - "Surface training t=8132, loss=0.038384029641747475\n", - "Surface training t=8133, loss=0.04047386161983013\n", - "Surface training t=8134, loss=0.05561373755335808\n", - "Surface training t=8135, loss=0.06265822239220142\n", - "Surface training t=8136, loss=0.04124743863940239\n", - "Surface training t=8137, loss=0.047410354018211365\n", - "Surface training t=8138, loss=0.0400670412927866\n", - "Surface training t=8139, loss=0.036313554272055626\n", - "Surface training t=8140, loss=0.04476672783493996\n", - "Surface training t=8141, loss=0.04183358885347843\n", - "Surface training t=8142, loss=0.04391285218298435\n", - "Surface training t=8143, loss=0.07128050923347473\n", - "Surface training t=8144, loss=0.05363134853541851\n", - "Surface training t=8145, loss=0.056044312193989754\n", - "Surface training t=8146, loss=0.05099867284297943\n", - "Surface training t=8147, loss=0.04563993867486715\n", - "Surface training t=8148, loss=0.042807312682271004\n", - "Surface training t=8149, loss=0.03869009669870138\n", - "Surface training t=8150, loss=0.056853169575333595\n", - "Surface training t=8151, loss=0.052692048251628876\n", - "Surface training t=8152, loss=0.048433514311909676\n", - "Surface training t=8153, loss=0.0515508521348238\n", - "Surface training t=8154, loss=0.041842930018901825\n", - "Surface training t=8155, loss=0.03878939338028431\n", - "Surface training t=8156, loss=0.06360275484621525\n", - "Surface training t=8157, loss=0.05042772740125656\n", - "Surface training t=8158, loss=0.046639105305075645\n", - "Surface training t=8159, loss=0.03837432060390711\n", - "Surface training t=8160, loss=0.040753064677119255\n", - "Surface training t=8161, loss=0.03596400283277035\n", - "Surface training t=8162, loss=0.03594751749187708\n", - "Surface training t=8163, loss=0.04476146213710308\n", - "Surface training t=8164, loss=0.03665538877248764\n", - "Surface training t=8165, loss=0.034072921611368656\n", - "Surface training t=8166, loss=0.03747590258717537\n", - "Surface training t=8167, loss=0.044404828920960426\n", - "Surface training t=8168, loss=0.030271442607045174\n", - "Surface training t=8169, loss=0.02947387844324112\n", - "Surface training t=8170, loss=0.027790487743914127\n", - "Surface training t=8171, loss=0.039407284930348396\n", - "Surface training t=8172, loss=0.04433217830955982\n", - "Surface training t=8173, loss=0.03123011626303196\n", - "Surface training t=8174, loss=0.048464447259902954\n", - "Surface training t=8175, loss=0.043179234489798546\n", - "Surface training t=8176, loss=0.04622933454811573\n", - "Surface training t=8177, loss=0.05165672302246094\n", - "Surface training t=8178, loss=0.04566228576004505\n", - "Surface training t=8179, loss=0.04229218140244484\n", - "Surface training t=8180, loss=0.05138352885842323\n", - "Surface training t=8181, loss=0.06289944797754288\n", - "Surface training t=8182, loss=0.07240577973425388\n", - "Surface training t=8183, 
loss=0.10117115452885628\n", - "Surface training t=8184, loss=0.06165003776550293\n", - "Surface training t=8185, loss=0.06251878291368484\n", - "Surface training t=8186, loss=0.0598540473729372\n", - "Surface training t=8187, loss=0.08690063282847404\n", - "Surface training t=8188, loss=0.05849253013730049\n", - "Surface training t=8189, loss=0.050717780366539955\n", - "Surface training t=8190, loss=0.04376719333231449\n", - "Surface training t=8191, loss=0.07949284464120865\n", - "Surface training t=8192, loss=0.06556863710284233\n", - "Surface training t=8193, loss=0.05963302031159401\n", - "Surface training t=8194, loss=0.06973342224955559\n", - "Surface training t=8195, loss=0.12080088257789612\n", - "Surface training t=8196, loss=0.07484642416238785\n", - "Surface training t=8197, loss=0.09888829663395882\n", - "Surface training t=8198, loss=0.10479523614048958\n", - "Surface training t=8199, loss=0.07088159956037998\n", - "Surface training t=8200, loss=0.11270161718130112\n", - "Surface training t=8201, loss=0.077692411839962\n", - "Surface training t=8202, loss=0.10548760741949081\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=8203, loss=0.1050790511071682\n", - "Surface training t=8204, loss=0.06701423786580563\n", - "Surface training t=8205, loss=0.051142552867531776\n", - "Surface training t=8206, loss=0.06305070035159588\n", - "Surface training t=8207, loss=0.05264591798186302\n", - "Surface training t=8208, loss=0.041321054100990295\n", - "Surface training t=8209, loss=0.037438973784446716\n", - "Surface training t=8210, loss=0.028422240167856216\n", - "Surface training t=8211, loss=0.029197183437645435\n", - "Surface training t=8212, loss=0.02552660834044218\n", - "Surface training t=8213, loss=0.02770450245589018\n", - "Surface training t=8214, loss=0.026722535490989685\n", - "Surface training t=8215, loss=0.025469756685197353\n", - "Surface training t=8216, loss=0.03209039755165577\n", - "Surface training t=8217, loss=0.03133351635187864\n", - "Surface training t=8218, loss=0.03225865215063095\n", - "Surface training t=8219, loss=0.035759568214416504\n", - "Surface training t=8220, loss=0.04295361787080765\n", - "Surface training t=8221, loss=0.050283435732126236\n", - "Surface training t=8222, loss=0.04076569527387619\n", - "Surface training t=8223, loss=0.04812800511717796\n", - "Surface training t=8224, loss=0.040821123868227005\n", - "Surface training t=8225, loss=0.03986472077667713\n", - "Surface training t=8226, loss=0.040241312235593796\n", - "Surface training t=8227, loss=0.03669113479554653\n", - "Surface training t=8228, loss=0.02714124321937561\n", - "Surface training t=8229, loss=0.03180541843175888\n", - "Surface training t=8230, loss=0.03145965654402971\n", - "Surface training t=8231, loss=0.027090751565992832\n", - "Surface training t=8232, loss=0.027400050312280655\n", - "Surface training t=8233, loss=0.03858805075287819\n", - "Surface training t=8234, loss=0.030322750099003315\n", - "Surface training t=8235, loss=0.03352586179971695\n", - "Surface training t=8236, loss=0.038561574183404446\n", - "Surface training t=8237, loss=0.03510492108762264\n", - "Surface training t=8238, loss=0.04075220227241516\n", - "Surface training t=8239, loss=0.04845240339636803\n", - "Surface training t=8240, loss=0.04288442060351372\n", - "Surface training t=8241, loss=0.04298478364944458\n", - "Surface training t=8242, loss=0.04107731021940708\n", - "Surface training t=8243, loss=0.039881715551018715\n", - "Surface training 
t=8244, loss=0.03782412223517895\n", - "Surface training t=8245, loss=0.03211548551917076\n", - "Surface training t=8246, loss=0.02940355520695448\n", - "Surface training t=8247, loss=0.030091166496276855\n", - "Surface training t=8248, loss=0.04272838495671749\n", - "Surface training t=8249, loss=0.0569680780172348\n", - "Surface training t=8250, loss=0.03569667600095272\n", - "Surface training t=8251, loss=0.04104631952941418\n", - "Surface training t=8252, loss=0.06105398014187813\n", - "Surface training t=8253, loss=0.04898250848054886\n", - "Surface training t=8254, loss=0.04473084583878517\n", - "Surface training t=8255, loss=0.03325364459306002\n", - "Surface training t=8256, loss=0.03232808969914913\n", - "Surface training t=8257, loss=0.03247377369552851\n", - "Surface training t=8258, loss=0.046681586652994156\n", - "Surface training t=8259, loss=0.030174356885254383\n", - "Surface training t=8260, loss=0.031153300777077675\n", - "Surface training t=8261, loss=0.032263096421957016\n", - "Surface training t=8262, loss=0.04919477179646492\n", - "Surface training t=8263, loss=0.037971118465065956\n", - "Surface training t=8264, loss=0.050030261278152466\n", - "Surface training t=8265, loss=0.055599767714738846\n", - "Surface training t=8266, loss=0.04354579746723175\n", - "Surface training t=8267, loss=0.03752061910927296\n", - "Surface training t=8268, loss=0.03792121447622776\n", - "Surface training t=8269, loss=0.032529592514038086\n", - "Surface training t=8270, loss=0.030885319225490093\n", - "Surface training t=8271, loss=0.03380708210170269\n", - "Surface training t=8272, loss=0.026754316873848438\n", - "Surface training t=8273, loss=0.03183779865503311\n", - "Surface training t=8274, loss=0.03255602903664112\n", - "Surface training t=8275, loss=0.03331843763589859\n", - "Surface training t=8276, loss=0.03526376932859421\n", - "Surface training t=8277, loss=0.034892475232481956\n", - "Surface training t=8278, loss=0.02781333588063717\n", - "Surface training t=8279, loss=0.03267665021121502\n", - "Surface training t=8280, loss=0.03394125681370497\n", - "Surface training t=8281, loss=0.03565680794417858\n", - "Surface training t=8282, loss=0.03194417618215084\n", - "Surface training t=8283, loss=0.03506183996796608\n", - "Surface training t=8284, loss=0.0393892340362072\n", - "Surface training t=8285, loss=0.04644952341914177\n", - "Surface training t=8286, loss=0.03334442339837551\n", - "Surface training t=8287, loss=0.044795384630560875\n", - "Surface training t=8288, loss=0.0440982561558485\n", - "Surface training t=8289, loss=0.03905697539448738\n", - "Surface training t=8290, loss=0.04885396547615528\n", - "Surface training t=8291, loss=0.06349024549126625\n", - "Surface training t=8292, loss=0.048866186290979385\n", - "Surface training t=8293, loss=0.05403919890522957\n", - "Surface training t=8294, loss=0.044029600918293\n", - "Surface training t=8295, loss=0.044656009413301945\n", - "Surface training t=8296, loss=0.04117082431912422\n", - "Surface training t=8297, loss=0.04333432111889124\n", - "Surface training t=8298, loss=0.04931427538394928\n", - "Surface training t=8299, loss=0.04870997555553913\n", - "Surface training t=8300, loss=0.06307640112936497\n", - "Surface training t=8301, loss=0.05061435513198376\n", - "Surface training t=8302, loss=0.041544850915670395\n", - "Surface training t=8303, loss=0.03594118356704712\n", - "Surface training t=8304, loss=0.03341960348188877\n", - "Surface training t=8305, loss=0.037882596254348755\n", - "Surface training t=8306, 
loss=0.04215595871210098\n", - "Surface training t=8307, loss=0.042546797543764114\n", - "Surface training t=8308, loss=0.0715254582464695\n", - "Surface training t=8309, loss=0.04326873365789652\n", - "Surface training t=8310, loss=0.041619038209319115\n", - "Surface training t=8311, loss=0.05803236924111843\n", - "Surface training t=8312, loss=0.042247370816767216\n", - "Surface training t=8313, loss=0.06798036023974419\n", - "Surface training t=8314, loss=0.05275349318981171\n", - "Surface training t=8315, loss=0.06363633833825588\n", - "Surface training t=8316, loss=0.06704353354871273\n", - "Surface training t=8317, loss=0.04450143314898014\n", - "Surface training t=8318, loss=0.05504298582673073\n", - "Surface training t=8319, loss=0.05227966792881489\n", - "Surface training t=8320, loss=0.05487368814647198\n", - "Surface training t=8321, loss=0.05600308068096638\n", - "Surface training t=8322, loss=0.03367375861853361\n", - "Surface training t=8323, loss=0.029225997626781464\n", - "Surface training t=8324, loss=0.038667427375912666\n", - "Surface training t=8325, loss=0.03169070743024349\n", - "Surface training t=8326, loss=0.029472491703927517\n", - "Surface training t=8327, loss=0.034587014466524124\n", - "Surface training t=8328, loss=0.02668710146099329\n", - "Surface training t=8329, loss=0.033562103286385536\n", - "Surface training t=8330, loss=0.031366356648504734\n", - "Surface training t=8331, loss=0.026886671781539917\n", - "Surface training t=8332, loss=0.028743427246809006\n", - "Surface training t=8333, loss=0.03432873263955116\n", - "Surface training t=8334, loss=0.027495136484503746\n", - "Surface training t=8335, loss=0.026656211353838444\n", - "Surface training t=8336, loss=0.04255424253642559\n", - "Surface training t=8337, loss=0.029918868094682693\n", - "Surface training t=8338, loss=0.02684694714844227\n", - "Surface training t=8339, loss=0.02610689029097557\n", - "Surface training t=8340, loss=0.024049967527389526\n", - "Surface training t=8341, loss=0.02337418496608734\n", - "Surface training t=8342, loss=0.025275783613324165\n", - "Surface training t=8343, loss=0.02615114487707615\n", - "Surface training t=8344, loss=0.029808033257722855\n", - "Surface training t=8345, loss=0.02931084856390953\n", - "Surface training t=8346, loss=0.032859621569514275\n", - "Surface training t=8347, loss=0.03852892108261585\n", - "Surface training t=8348, loss=0.0383668914437294\n", - "Surface training t=8349, loss=0.04166810028254986\n", - "Surface training t=8350, loss=0.04321316536515951\n", - "Surface training t=8351, loss=0.05153868347406387\n", - "Surface training t=8352, loss=0.048231277614831924\n", - "Surface training t=8353, loss=0.04611060582101345\n", - "Surface training t=8354, loss=0.035844586789608\n", - "Surface training t=8355, loss=0.0422224048525095\n", - "Surface training t=8356, loss=0.03250591550022364\n", - "Surface training t=8357, loss=0.03600284084677696\n", - "Surface training t=8358, loss=0.03784950077533722\n", - "Surface training t=8359, loss=0.03339073061943054\n", - "Surface training t=8360, loss=0.03222723864018917\n", - "Surface training t=8361, loss=0.03175214026123285\n", - "Surface training t=8362, loss=0.026254245080053806\n", - "Surface training t=8363, loss=0.02715477254241705\n", - "Surface training t=8364, loss=0.0381023995578289\n", - "Surface training t=8365, loss=0.04558165185153484\n", - "Surface training t=8366, loss=0.03880011476576328\n", - "Surface training t=8367, loss=0.05082217790186405\n", - "Surface training t=8368, 
loss=0.05200410261750221\n", - "Surface training t=8369, loss=0.03830001689493656\n", - "Surface training t=8370, loss=0.03267601318657398\n", - "Surface training t=8371, loss=0.04293033666908741\n", - "Surface training t=8372, loss=0.02972140721976757\n", - "Surface training t=8373, loss=0.03866324760019779\n", - "Surface training t=8374, loss=0.030847922898828983\n", - "Surface training t=8375, loss=0.028333532623946667\n", - "Surface training t=8376, loss=0.03810955211520195\n", - "Surface training t=8377, loss=0.04928925447165966\n", - "Surface training t=8378, loss=0.028605449944734573\n", - "Surface training t=8379, loss=0.02545198332518339\n", - "Surface training t=8380, loss=0.031312370672822\n", - "Surface training t=8381, loss=0.02683196309953928\n", - "Surface training t=8382, loss=0.0257329223677516\n", - "Surface training t=8383, loss=0.0344505263492465\n", - "Surface training t=8384, loss=0.03682582266628742\n", - "Surface training t=8385, loss=0.032837086357176304\n", - "Surface training t=8386, loss=0.030305813997983932\n", - "Surface training t=8387, loss=0.0403779037296772\n", - "Surface training t=8388, loss=0.032369714230298996\n", - "Surface training t=8389, loss=0.034729027189314365\n", - "Surface training t=8390, loss=0.04590030387043953\n", - "Surface training t=8391, loss=0.039511680603027344\n", - "Surface training t=8392, loss=0.032553404569625854\n", - "Surface training t=8393, loss=0.03553882986307144\n", - "Surface training t=8394, loss=0.03719790652394295\n", - "Surface training t=8395, loss=0.04524189978837967\n", - "Surface training t=8396, loss=0.047144729644060135\n", - "Surface training t=8397, loss=0.03634160757064819\n", - "Surface training t=8398, loss=0.043530721217393875\n", - "Surface training t=8399, loss=0.05275377258658409\n", - "Surface training t=8400, loss=0.0672764852643013\n", - "Surface training t=8401, loss=0.04361075162887573\n", - "Surface training t=8402, loss=0.040113311260938644\n", - "Surface training t=8403, loss=0.04099966585636139\n", - "Surface training t=8404, loss=0.03041326440870762\n", - "Surface training t=8405, loss=0.037634195759892464\n", - "Surface training t=8406, loss=0.047636423259973526\n", - "Surface training t=8407, loss=0.026269052177667618\n", - "Surface training t=8408, loss=0.03270510025322437\n", - "Surface training t=8409, loss=0.03162575140595436\n", - "Surface training t=8410, loss=0.026964737102389336\n", - "Surface training t=8411, loss=0.0223327549174428\n", - "Surface training t=8412, loss=0.025426611304283142\n", - "Surface training t=8413, loss=0.02558442112058401\n", - "Surface training t=8414, loss=0.03787286952137947\n", - "Surface training t=8415, loss=0.029593749903142452\n", - "Surface training t=8416, loss=0.027819769456982613\n", - "Surface training t=8417, loss=0.03179425932466984\n", - "Surface training t=8418, loss=0.02364980150014162\n", - "Surface training t=8419, loss=0.037036117166280746\n", - "Surface training t=8420, loss=0.03300974331796169\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=8421, loss=0.03806858882308006\n", - "Surface training t=8422, loss=0.03419074974954128\n", - "Surface training t=8423, loss=0.03777211345732212\n", - "Surface training t=8424, loss=0.050778916105628014\n", - "Surface training t=8425, loss=0.052591562271118164\n", - "Surface training t=8426, loss=0.0322908740490675\n", - "Surface training t=8427, loss=0.037472331896424294\n", - "Surface training t=8428, loss=0.045466091483831406\n", - "Surface 
training t=8429, loss=0.032402606680989265\n", - "Surface training t=8430, loss=0.027437803335487843\n", - "Surface training t=8431, loss=0.023364226333796978\n", - "Surface training t=8432, loss=0.03125680051743984\n", - "Surface training t=8433, loss=0.04286826401948929\n", - "Surface training t=8434, loss=0.03168961778283119\n", - "Surface training t=8435, loss=0.03529270365834236\n", - "Surface training t=8436, loss=0.04103880934417248\n", - "Surface training t=8437, loss=0.033197845332324505\n", - "Surface training t=8438, loss=0.042486466467380524\n", - "Surface training t=8439, loss=0.03246404882520437\n", - "Surface training t=8440, loss=0.03483757562935352\n", - "Surface training t=8441, loss=0.04459569416940212\n", - "Surface training t=8442, loss=0.04324157629162073\n", - "Surface training t=8443, loss=0.03239503502845764\n", - "Surface training t=8444, loss=0.03425284381955862\n", - "Surface training t=8445, loss=0.038818007335066795\n", - "Surface training t=8446, loss=0.03394310548901558\n", - "Surface training t=8447, loss=0.03562464192509651\n", - "Surface training t=8448, loss=0.04485797602683306\n", - "Surface training t=8449, loss=0.05094423331320286\n", - "Surface training t=8450, loss=0.05288161151111126\n", - "Surface training t=8451, loss=0.046684570610523224\n", - "Surface training t=8452, loss=0.046377796679735184\n", - "Surface training t=8453, loss=0.03279853332787752\n", - "Surface training t=8454, loss=0.028990505263209343\n", - "Surface training t=8455, loss=0.025056163780391216\n", - "Surface training t=8456, loss=0.02564324624836445\n", - "Surface training t=8457, loss=0.0271984301507473\n", - "Surface training t=8458, loss=0.033239033073186874\n", - "Surface training t=8459, loss=0.03610076941549778\n", - "Surface training t=8460, loss=0.0387368518859148\n", - "Surface training t=8461, loss=0.027498728595674038\n", - "Surface training t=8462, loss=0.03640221059322357\n", - "Surface training t=8463, loss=0.0367925688624382\n", - "Surface training t=8464, loss=0.030356822535395622\n", - "Surface training t=8465, loss=0.037000445649027824\n", - "Surface training t=8466, loss=0.035992274060845375\n", - "Surface training t=8467, loss=0.04097479581832886\n", - "Surface training t=8468, loss=0.03730248287320137\n", - "Surface training t=8469, loss=0.03252730518579483\n", - "Surface training t=8470, loss=0.03213957138359547\n", - "Surface training t=8471, loss=0.03400816582143307\n", - "Surface training t=8472, loss=0.03718698490411043\n", - "Surface training t=8473, loss=0.042735276743769646\n", - "Surface training t=8474, loss=0.029819616116583347\n", - "Surface training t=8475, loss=0.029783503152430058\n", - "Surface training t=8476, loss=0.023450659587979317\n", - "Surface training t=8477, loss=0.03718236833810806\n", - "Surface training t=8478, loss=0.03208225034177303\n", - "Surface training t=8479, loss=0.028389006853103638\n", - "Surface training t=8480, loss=0.028559256345033646\n", - "Surface training t=8481, loss=0.02862981054931879\n", - "Surface training t=8482, loss=0.03089744597673416\n", - "Surface training t=8483, loss=0.029224512167274952\n", - "Surface training t=8484, loss=0.02565834391862154\n", - "Surface training t=8485, loss=0.03221403528004885\n", - "Surface training t=8486, loss=0.04047921672463417\n", - "Surface training t=8487, loss=0.052191002294421196\n", - "Surface training t=8488, loss=0.06453154049813747\n", - "Surface training t=8489, loss=0.09307565540075302\n", - "Surface training t=8490, loss=0.08618203923106194\n", - "Surface 
-        [verbose notebook stdout elided: ~1,300 repetitive log lines of the form "Surface training t=<step>, loss=<value>", covering steps t=8491 through t=9782, with per-step losses fluctuating between roughly 0.02 and 0.11; the deleted output cells contained no other content]
training t=9783, loss=0.042470917105674744\n", - "Surface training t=9784, loss=0.04015364404767752\n", - "Surface training t=9785, loss=0.05125902406871319\n", - "Surface training t=9786, loss=0.03614345006644726\n", - "Surface training t=9787, loss=0.034554870799183846\n", - "Surface training t=9788, loss=0.03435451164841652\n", - "Surface training t=9789, loss=0.03089738730341196\n", - "Surface training t=9790, loss=0.031422413885593414\n", - "Surface training t=9791, loss=0.02586725540459156\n", - "Surface training t=9792, loss=0.027485317550599575\n", - "Surface training t=9793, loss=0.028855178505182266\n", - "Surface training t=9794, loss=0.025477871298789978\n", - "Surface training t=9795, loss=0.024517958983778954\n", - "Surface training t=9796, loss=0.03277725912630558\n", - "Surface training t=9797, loss=0.03995757922530174\n", - "Surface training t=9798, loss=0.02935598697513342\n", - "Surface training t=9799, loss=0.04120850935578346\n", - "Surface training t=9800, loss=0.032350036315619946\n", - "Surface training t=9801, loss=0.031068935990333557\n", - "Surface training t=9802, loss=0.029241779819130898\n", - "Surface training t=9803, loss=0.03024875931441784\n", - "Surface training t=9804, loss=0.0340093057602644\n", - "Surface training t=9805, loss=0.0254001310095191\n", - "Surface training t=9806, loss=0.026202308014035225\n", - "Surface training t=9807, loss=0.01988877821713686\n", - "Surface training t=9808, loss=0.019833034835755825\n", - "Surface training t=9809, loss=0.025605501607060432\n", - "Surface training t=9810, loss=0.02306707203388214\n", - "Surface training t=9811, loss=0.04053156264126301\n", - "Surface training t=9812, loss=0.04044343903660774\n", - "Surface training t=9813, loss=0.04054510220885277\n", - "Surface training t=9814, loss=0.03673507645726204\n", - "Surface training t=9815, loss=0.036589449271559715\n", - "Surface training t=9816, loss=0.04192679654806852\n", - "Surface training t=9817, loss=0.042464567348361015\n", - "Surface training t=9818, loss=0.04389127902686596\n", - "Surface training t=9819, loss=0.030117040500044823\n", - "Surface training t=9820, loss=0.029859211295843124\n", - "Surface training t=9821, loss=0.029658429324626923\n", - "Surface training t=9822, loss=0.03004886768758297\n", - "Surface training t=9823, loss=0.027326665818691254\n", - "Surface training t=9824, loss=0.03422069922089577\n", - "Surface training t=9825, loss=0.025610674172639847\n", - "Surface training t=9826, loss=0.04044313542544842\n", - "Surface training t=9827, loss=0.03133660461753607\n", - "Surface training t=9828, loss=0.037571960128843784\n", - "Surface training t=9829, loss=0.03089696727693081\n", - "Surface training t=9830, loss=0.019786283373832703\n", - "Surface training t=9831, loss=0.022971102967858315\n", - "Surface training t=9832, loss=0.02991261798888445\n", - "Surface training t=9833, loss=0.032658349722623825\n", - "Surface training t=9834, loss=0.03630463220179081\n", - "Surface training t=9835, loss=0.029939375817775726\n", - "Surface training t=9836, loss=0.04860666207969189\n", - "Surface training t=9837, loss=0.03785592317581177\n", - "Surface training t=9838, loss=0.06328899972140789\n", - "Surface training t=9839, loss=0.04503430984914303\n", - "Surface training t=9840, loss=0.06322731077671051\n", - "Surface training t=9841, loss=0.032554587349295616\n", - "Surface training t=9842, loss=0.03244688920676708\n", - "Surface training t=9843, loss=0.03422770835459232\n", - "Surface training t=9844, loss=0.025853966362774372\n", - 
"Surface training t=9845, loss=0.028578310273587704\n", - "Surface training t=9846, loss=0.03600117936730385\n", - "Surface training t=9847, loss=0.03025093674659729\n", - "Surface training t=9848, loss=0.027431972324848175\n", - "Surface training t=9849, loss=0.025682665407657623\n", - "Surface training t=9850, loss=0.02262546494603157\n", - "Surface training t=9851, loss=0.03106637392193079\n", - "Surface training t=9852, loss=0.025001823902130127\n", - "Surface training t=9853, loss=0.03303942456841469\n", - "Surface training t=9854, loss=0.038126105442643166\n", - "Surface training t=9855, loss=0.03684781491756439\n", - "Surface training t=9856, loss=0.050304166972637177\n", - "Surface training t=9857, loss=0.05062614940106869\n", - "Surface training t=9858, loss=0.06105678901076317\n", - "Surface training t=9859, loss=0.06497257575392723\n", - "Surface training t=9860, loss=0.061710599809885025\n", - "Surface training t=9861, loss=0.09231666848063469\n", - "Surface training t=9862, loss=0.09500177577137947\n", - "Surface training t=9863, loss=0.06341942586004734\n", - "Surface training t=9864, loss=0.07825279980897903\n", - "Surface training t=9865, loss=0.05198800191283226\n", - "Surface training t=9866, loss=0.06309714540839195\n", - "Surface training t=9867, loss=0.059427034109830856\n", - "Surface training t=9868, loss=0.062377069145441055\n", - "Surface training t=9869, loss=0.053951533511281013\n", - "Surface training t=9870, loss=0.06392696313560009\n", - "Surface training t=9871, loss=0.05181345343589783\n", - "Surface training t=9872, loss=0.06595994532108307\n", - "Surface training t=9873, loss=0.0426073158159852\n", - "Surface training t=9874, loss=0.05544991046190262\n", - "Surface training t=9875, loss=0.03630150482058525\n", - "Surface training t=9876, loss=0.05216534808278084\n", - "Surface training t=9877, loss=0.05957831256091595\n", - "Surface training t=9878, loss=0.07713223248720169\n", - "Surface training t=9879, loss=0.06256452761590481\n", - "Surface training t=9880, loss=0.06471492163836956\n", - "Surface training t=9881, loss=0.04116855934262276\n", - "Surface training t=9882, loss=0.03661922086030245\n", - "Surface training t=9883, loss=0.033974237740039825\n", - "Surface training t=9884, loss=0.029743600636720657\n", - "Surface training t=9885, loss=0.024250414222478867\n", - "Surface training t=9886, loss=0.028775472193956375\n", - "Surface training t=9887, loss=0.021571089513599873\n", - "Surface training t=9888, loss=0.022054464556276798\n", - "Surface training t=9889, loss=0.0450966227799654\n", - "Surface training t=9890, loss=0.0736551284790039\n", - "Surface training t=9891, loss=0.04962374269962311\n", - "Surface training t=9892, loss=0.054248249158263206\n", - "Surface training t=9893, loss=0.07569807209074497\n", - "Surface training t=9894, loss=0.08034750819206238\n", - "Surface training t=9895, loss=0.053210845217108727\n", - "Surface training t=9896, loss=0.050148120149970055\n", - "Surface training t=9897, loss=0.05084865167737007\n", - "Surface training t=9898, loss=0.055280519649386406\n", - "Surface training t=9899, loss=0.04295804165303707\n", - "Surface training t=9900, loss=0.039004847407341\n", - "Surface training t=9901, loss=0.038425687700510025\n", - "Surface training t=9902, loss=0.027777206152677536\n", - "Surface training t=9903, loss=0.03342422563582659\n", - "Surface training t=9904, loss=0.03936477843672037\n", - "Surface training t=9905, loss=0.03225699905306101\n", - "Surface training t=9906, loss=0.028677593916654587\n", - 
"Surface training t=9907, loss=0.029800566844642162\n", - "Surface training t=9908, loss=0.03560225013643503\n", - "Surface training t=9909, loss=0.029753895476460457\n", - "Surface training t=9910, loss=0.03147604875266552\n", - "Surface training t=9911, loss=0.043466974049806595\n", - "Surface training t=9912, loss=0.02908132318407297\n", - "Surface training t=9913, loss=0.029013480991125107\n", - "Surface training t=9914, loss=0.036918558180332184\n", - "Surface training t=9915, loss=0.041765378788113594\n", - "Surface training t=9916, loss=0.046316832304000854\n", - "Surface training t=9917, loss=0.04024007171392441\n", - "Surface training t=9918, loss=0.02525930292904377\n", - "Surface training t=9919, loss=0.02582209836691618\n", - "Surface training t=9920, loss=0.030994855798780918\n", - "Surface training t=9921, loss=0.04750153794884682\n", - "Surface training t=9922, loss=0.036150360479950905\n", - "Surface training t=9923, loss=0.042304592207074165\n", - "Surface training t=9924, loss=0.03407760430127382\n", - "Surface training t=9925, loss=0.04683597944676876\n", - "Surface training t=9926, loss=0.07207256183028221\n", - "Surface training t=9927, loss=0.046765596605837345\n", - "Surface training t=9928, loss=0.05672438256442547\n", - "Surface training t=9929, loss=0.05093419551849365\n", - "Surface training t=9930, loss=0.1120775155723095\n", - "Surface training t=9931, loss=0.0649009495973587\n", - "Surface training t=9932, loss=0.10376643016934395\n", - "Surface training t=9933, loss=0.07983535900712013\n", - "Surface training t=9934, loss=0.0602797232568264\n", - "Surface training t=9935, loss=0.06487612798810005\n", - "Surface training t=9936, loss=0.05333993211388588\n", - "Surface training t=9937, loss=0.04502210021018982\n", - "Surface training t=9938, loss=0.04467012546956539\n", - "Surface training t=9939, loss=0.04566812328994274\n", - "Surface training t=9940, loss=0.04065253213047981\n", - "Surface training t=9941, loss=0.043726781383156776\n", - "Surface training t=9942, loss=0.03772701323032379\n", - "Surface training t=9943, loss=0.053089918568730354\n", - "Surface training t=9944, loss=0.03968043904751539\n", - "Surface training t=9945, loss=0.044239116832613945\n", - "Surface training t=9946, loss=0.03249960392713547\n", - "Surface training t=9947, loss=0.03550666756927967\n", - "Surface training t=9948, loss=0.03768976218998432\n", - "Surface training t=9949, loss=0.03947068564593792\n", - "Surface training t=9950, loss=0.05047670565545559\n", - "Surface training t=9951, loss=0.048138244077563286\n", - "Surface training t=9952, loss=0.03888723719865084\n", - "Surface training t=9953, loss=0.03439189866185188\n", - "Surface training t=9954, loss=0.06670074164867401\n", - "Surface training t=9955, loss=0.0484831091016531\n", - "Surface training t=9956, loss=0.044133247807621956\n", - "Surface training t=9957, loss=0.04253001883625984\n", - "Surface training t=9958, loss=0.04961005039513111\n", - "Surface training t=9959, loss=0.0832429900765419\n", - "Surface training t=9960, loss=0.06547260843217373\n", - "Surface training t=9961, loss=0.07965943403542042\n", - "Surface training t=9962, loss=0.1098501868546009\n", - "Surface training t=9963, loss=0.058495599776506424\n", - "Surface training t=9964, loss=0.0457482673227787\n", - "Surface training t=9965, loss=0.038222795352339745\n", - "Surface training t=9966, loss=0.03202606085687876\n", - "Surface training t=9967, loss=0.03426981158554554\n", - "Surface training t=9968, loss=0.038480695337057114\n", - "Surface 
training t=9969, loss=0.03482761047780514\n", - "Surface training t=9970, loss=0.03078118059784174\n", - "Surface training t=9971, loss=0.02579465415328741\n", - "Surface training t=9972, loss=0.03555011749267578\n", - "Surface training t=9973, loss=0.03186333738267422\n", - "Surface training t=9974, loss=0.04782022349536419\n", - "Surface training t=9975, loss=0.03161400742828846\n", - "Surface training t=9976, loss=0.03836209233850241\n", - "Surface training t=9977, loss=0.04053671099245548\n", - "Surface training t=9978, loss=0.03392592817544937\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=9979, loss=0.03513842076063156\n", - "Surface training t=9980, loss=0.02911230083554983\n", - "Surface training t=9981, loss=0.03213627636432648\n", - "Surface training t=9982, loss=0.035955335944890976\n", - "Surface training t=9983, loss=0.026778013445436954\n", - "Surface training t=9984, loss=0.028628237545490265\n", - "Surface training t=9985, loss=0.019306113477796316\n", - "Surface training t=9986, loss=0.021754919551312923\n", - "Surface training t=9987, loss=0.021722011268138885\n", - "Surface training t=9988, loss=0.02328298892825842\n", - "Surface training t=9989, loss=0.03128358907997608\n", - "Surface training t=9990, loss=0.031944140791893005\n", - "Surface training t=9991, loss=0.03851511888206005\n", - "Surface training t=9992, loss=0.045129429548978806\n", - "Surface training t=9993, loss=0.04487387277185917\n", - "Surface training t=9994, loss=0.04326272942125797\n", - "Surface training t=9995, loss=0.0630657747387886\n", - "Surface training t=9996, loss=0.061392663046717644\n", - "Surface training t=9997, loss=0.05690637230873108\n", - "Surface training t=9998, loss=0.06861584447324276\n", - "Surface training t=9999, loss=0.05559576116502285\n", - "Surface training t=10000, loss=0.0552041232585907\n", - "Surface training t=10001, loss=0.053176818415522575\n", - "Surface training t=10002, loss=0.0668993890285492\n", - "Surface training t=10003, loss=0.053711721673607826\n", - "Surface training t=10004, loss=0.05412929505109787\n", - "Surface training t=10005, loss=0.041481535881757736\n", - "Surface training t=10006, loss=0.05261689983308315\n", - "Surface training t=10007, loss=0.05061380937695503\n", - "Surface training t=10008, loss=0.03995688445866108\n", - "Surface training t=10009, loss=0.05743413418531418\n", - "Surface training t=10010, loss=0.06143329478800297\n", - "Surface training t=10011, loss=0.05623563565313816\n", - "Surface training t=10012, loss=0.06789541989564896\n", - "Surface training t=10013, loss=0.11653601378202438\n", - "Surface training t=10014, loss=0.0687463004142046\n", - "Surface training t=10015, loss=0.12056971341371536\n", - "Surface training t=10016, loss=0.06651921849697828\n", - "Surface training t=10017, loss=0.07269874215126038\n", - "Surface training t=10018, loss=0.12217768281698227\n", - "Surface training t=10019, loss=0.08255169168114662\n", - "Surface training t=10020, loss=0.11834921315312386\n", - "Surface training t=10021, loss=0.06449250690639019\n", - "Surface training t=10022, loss=0.07664654217660427\n", - "Surface training t=10023, loss=0.07400002330541611\n", - "Surface training t=10024, loss=0.055634185671806335\n", - "Surface training t=10025, loss=0.049993470311164856\n", - "Surface training t=10026, loss=0.04627246782183647\n", - "Surface training t=10027, loss=0.03961518406867981\n", - "Surface training t=10028, loss=0.05354587361216545\n", - "Surface training t=10029, 
loss=0.05192580632865429\n", - "Surface training t=10030, loss=0.040375834330916405\n", - "Surface training t=10031, loss=0.05572260916233063\n", - "Surface training t=10032, loss=0.055518703535199165\n", - "Surface training t=10033, loss=0.053782498463988304\n", - "Surface training t=10034, loss=0.05879073403775692\n", - "Surface training t=10035, loss=0.05371899902820587\n", - "Surface training t=10036, loss=0.05233234167098999\n", - "Surface training t=10037, loss=0.04716517589986324\n", - "Surface training t=10038, loss=0.07891199365258217\n", - "Surface training t=10039, loss=0.0534086087718606\n", - "Surface training t=10040, loss=0.053868163377046585\n", - "Surface training t=10041, loss=0.08119812607765198\n", - "Surface training t=10042, loss=0.044754695147275925\n", - "Surface training t=10043, loss=0.04634913243353367\n", - "Surface training t=10044, loss=0.04866505041718483\n", - "Surface training t=10045, loss=0.06509912386536598\n", - "Surface training t=10046, loss=0.049819594249129295\n", - "Surface training t=10047, loss=0.05708593130111694\n", - "Surface training t=10048, loss=0.05396900698542595\n", - "Surface training t=10049, loss=0.061180684715509415\n", - "Surface training t=10050, loss=0.07677885517477989\n", - "Surface training t=10051, loss=0.07904712110757828\n", - "Surface training t=10052, loss=0.054377997294068336\n", - "Surface training t=10053, loss=0.06409035995602608\n", - "Surface training t=10054, loss=0.03973081894218922\n", - "Surface training t=10055, loss=0.046222032979130745\n", - "Surface training t=10056, loss=0.04702114313840866\n", - "Surface training t=10057, loss=0.038458364084362984\n", - "Surface training t=10058, loss=0.0435914508998394\n", - "Surface training t=10059, loss=0.04407105967402458\n", - "Surface training t=10060, loss=0.0434817997738719\n", - "Surface training t=10061, loss=0.06018920801579952\n", - "Surface training t=10062, loss=0.05618672072887421\n", - "Surface training t=10063, loss=0.046082017943263054\n", - "Surface training t=10064, loss=0.042998867109417915\n", - "Surface training t=10065, loss=0.04944811202585697\n", - "Surface training t=10066, loss=0.03588935825973749\n", - "Surface training t=10067, loss=0.03463468700647354\n", - "Surface training t=10068, loss=0.037223391234874725\n", - "Surface training t=10069, loss=0.04326380416750908\n", - "Surface training t=10070, loss=0.030343296006321907\n", - "Surface training t=10071, loss=0.03187241964042187\n", - "Surface training t=10072, loss=0.026870728470385075\n", - "Surface training t=10073, loss=0.028946534730494022\n", - "Surface training t=10074, loss=0.02675891574472189\n", - "Surface training t=10075, loss=0.04072811733931303\n", - "Surface training t=10076, loss=0.045844689942896366\n", - "Surface training t=10077, loss=0.057266825810074806\n", - "Surface training t=10078, loss=0.04769870266318321\n", - "Surface training t=10079, loss=0.07368579134345055\n", - "Surface training t=10080, loss=0.057704322040081024\n", - "Surface training t=10081, loss=0.0429528784006834\n", - "Surface training t=10082, loss=0.04970583878457546\n", - "Surface training t=10083, loss=0.052121374756097794\n", - "Surface training t=10084, loss=0.058404453098773956\n", - "Surface training t=10085, loss=0.04809127189218998\n", - "Surface training t=10086, loss=0.03895685262978077\n", - "Surface training t=10087, loss=0.03892005793750286\n", - "Surface training t=10088, loss=0.05157887563109398\n", - "Surface training t=10089, loss=0.0396233182400465\n", - "Surface training t=10090, 
loss=0.03532646410167217\n", - "Surface training t=10091, loss=0.038371266797184944\n", - "Surface training t=10092, loss=0.04098943620920181\n", - "Surface training t=10093, loss=0.05357351526618004\n", - "Surface training t=10094, loss=0.03897012397646904\n", - "Surface training t=10095, loss=0.05055328458547592\n", - "Surface training t=10096, loss=0.044959332793951035\n", - "Surface training t=10097, loss=0.06199840269982815\n", - "Surface training t=10098, loss=0.056280072778463364\n", - "Surface training t=10099, loss=0.05898223631083965\n", - "Surface training t=10100, loss=0.03866659291088581\n", - "Surface training t=10101, loss=0.03914599772542715\n", - "Surface training t=10102, loss=0.036914730444550514\n", - "Surface training t=10103, loss=0.03428987227380276\n", - "Surface training t=10104, loss=0.03709630109369755\n", - "Surface training t=10105, loss=0.04364946484565735\n", - "Surface training t=10106, loss=0.03159980196505785\n", - "Surface training t=10107, loss=0.040154374204576015\n", - "Surface training t=10108, loss=0.041275457479059696\n", - "Surface training t=10109, loss=0.05291826277971268\n", - "Surface training t=10110, loss=0.06508992984890938\n", - "Surface training t=10111, loss=0.03842878434807062\n", - "Surface training t=10112, loss=0.044659778475761414\n", - "Surface training t=10113, loss=0.039658937603235245\n", - "Surface training t=10114, loss=0.037680793553590775\n", - "Surface training t=10115, loss=0.03968821745365858\n", - "Surface training t=10116, loss=0.053207285702228546\n", - "Surface training t=10117, loss=0.05052236560732126\n", - "Surface training t=10118, loss=0.040446002036333084\n", - "Surface training t=10119, loss=0.05578835494816303\n", - "Surface training t=10120, loss=0.05274380370974541\n", - "Surface training t=10121, loss=0.05330612789839506\n", - "Surface training t=10122, loss=0.06245820224285126\n", - "Surface training t=10123, loss=0.09076216071844101\n", - "Surface training t=10124, loss=0.06009232625365257\n", - "Surface training t=10125, loss=0.051950063556432724\n", - "Surface training t=10126, loss=0.04080612771213055\n", - "Surface training t=10127, loss=0.061554279178380966\n", - "Surface training t=10128, loss=0.046287041157484055\n", - "Surface training t=10129, loss=0.04589936509728432\n", - "Surface training t=10130, loss=0.0449000708758831\n", - "Surface training t=10131, loss=0.039347318932414055\n", - "Surface training t=10132, loss=0.03852219507098198\n", - "Surface training t=10133, loss=0.048353444784879684\n", - "Surface training t=10134, loss=0.052006373181939125\n", - "Surface training t=10135, loss=0.06853253021836281\n", - "Surface training t=10136, loss=0.055752710439264774\n", - "Surface training t=10137, loss=0.0884203352034092\n", - "Surface training t=10138, loss=0.06818754598498344\n", - "Surface training t=10139, loss=0.048604393377900124\n", - "Surface training t=10140, loss=0.05408797040581703\n", - "Surface training t=10141, loss=0.07509317994117737\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=10142, loss=0.05164557881653309\n", - "Surface training t=10143, loss=0.050653401762247086\n", - "Surface training t=10144, loss=0.05137175880372524\n", - "Surface training t=10145, loss=0.08287498354911804\n", - "Surface training t=10146, loss=0.047613443806767464\n", - "Surface training t=10147, loss=0.04886421002447605\n", - "Surface training t=10148, loss=0.03512655571103096\n", - "Surface training t=10149, loss=0.030551722273230553\n", - "Surface 
training t=10150, loss=0.03185121342539787\n", - "Surface training t=10151, loss=0.03168782405555248\n", - "Surface training t=10152, loss=0.026615488342940807\n", - "Surface training t=10153, loss=0.024617253802716732\n", - "Surface training t=10154, loss=0.02414338756352663\n", - "Surface training t=10155, loss=0.02661123964935541\n", - "Surface training t=10156, loss=0.03253814112395048\n", - "Surface training t=10157, loss=0.0468501690775156\n", - "Surface training t=10158, loss=0.04174484871327877\n", - "Surface training t=10159, loss=0.039210086688399315\n", - "Surface training t=10160, loss=0.0551311019808054\n", - "Surface training t=10161, loss=0.043258825317025185\n", - "Surface training t=10162, loss=0.040702554397284985\n", - "Surface training t=10163, loss=0.04365590214729309\n", - "Surface training t=10164, loss=0.047865571454167366\n", - "Surface training t=10165, loss=0.04236300848424435\n", - "Surface training t=10166, loss=0.04419406317174435\n", - "Surface training t=10167, loss=0.07637339271605015\n", - "Surface training t=10168, loss=0.04829160496592522\n", - "Surface training t=10169, loss=0.04910353757441044\n", - "Surface training t=10170, loss=0.0375076113268733\n", - "Surface training t=10171, loss=0.03193048760294914\n", - "Surface training t=10172, loss=0.02487209253013134\n", - "Surface training t=10173, loss=0.030872889794409275\n", - "Surface training t=10174, loss=0.027221872471272945\n", - "Surface training t=10175, loss=0.03397348336875439\n", - "Surface training t=10176, loss=0.027830010280013084\n", - "Surface training t=10177, loss=0.028233695775270462\n", - "Surface training t=10178, loss=0.02258757594972849\n", - "Surface training t=10179, loss=0.03291437216103077\n", - "Surface training t=10180, loss=0.026247069239616394\n", - "Surface training t=10181, loss=0.041936418041586876\n", - "Surface training t=10182, loss=0.03820056654512882\n", - "Surface training t=10183, loss=0.0371690820902586\n", - "Surface training t=10184, loss=0.03853004239499569\n", - "Surface training t=10185, loss=0.031143338419497013\n", - "Surface training t=10186, loss=0.03369366005063057\n", - "Surface training t=10187, loss=0.04160961788147688\n", - "Surface training t=10188, loss=0.0413852259516716\n", - "Surface training t=10189, loss=0.04577409662306309\n", - "Surface training t=10190, loss=0.02826558891683817\n", - "Surface training t=10191, loss=0.03688550088554621\n", - "Surface training t=10192, loss=0.03389245644211769\n", - "Surface training t=10193, loss=0.029560739174485207\n", - "Surface training t=10194, loss=0.02915916219353676\n", - "Surface training t=10195, loss=0.03117198497056961\n", - "Surface training t=10196, loss=0.02744162641465664\n", - "Surface training t=10197, loss=0.0316119771450758\n", - "Surface training t=10198, loss=0.02641559299081564\n", - "Surface training t=10199, loss=0.02468715701252222\n", - "Surface training t=10200, loss=0.023612082935869694\n", - "Surface training t=10201, loss=0.030111941508948803\n", - "Surface training t=10202, loss=0.026073862798511982\n", - "Surface training t=10203, loss=0.029508890584111214\n", - "Surface training t=10204, loss=0.030209733173251152\n", - "Surface training t=10205, loss=0.03423548210412264\n", - "Surface training t=10206, loss=0.03365482576191425\n", - "Surface training t=10207, loss=0.030245058238506317\n", - "Surface training t=10208, loss=0.03064719494432211\n", - "Surface training t=10209, loss=0.02879456616938114\n", - "Surface training t=10210, loss=0.033775823190808296\n", - "Surface 
training t=10211, loss=0.03809588402509689\n", - "Surface training t=10212, loss=0.0331625621765852\n", - "Surface training t=10213, loss=0.03654100559651852\n", - "Surface training t=10214, loss=0.04219038411974907\n", - "Surface training t=10215, loss=0.037985507398843765\n", - "Surface training t=10216, loss=0.038014279678463936\n", - "Surface training t=10217, loss=0.040300989523530006\n", - "Surface training t=10218, loss=0.03987761773169041\n", - "Surface training t=10219, loss=0.03905288502573967\n", - "Surface training t=10220, loss=0.030323071405291557\n", - "Surface training t=10221, loss=0.03988369181752205\n", - "Surface training t=10222, loss=0.04164737742394209\n", - "Surface training t=10223, loss=0.03798751346766949\n", - "Surface training t=10224, loss=0.041907090693712234\n", - "Surface training t=10225, loss=0.04195941239595413\n", - "Surface training t=10226, loss=0.03529318142682314\n", - "Surface training t=10227, loss=0.04130350612103939\n", - "Surface training t=10228, loss=0.034645332023501396\n", - "Surface training t=10229, loss=0.040404289960861206\n", - "Surface training t=10230, loss=0.03256712481379509\n", - "Surface training t=10231, loss=0.03470898699015379\n", - "Surface training t=10232, loss=0.034676726907491684\n", - "Surface training t=10233, loss=0.028044359758496284\n", - "Surface training t=10234, loss=0.03443706128746271\n", - "Surface training t=10235, loss=0.03371697012335062\n", - "Surface training t=10236, loss=0.03183699585497379\n", - "Surface training t=10237, loss=0.040588621981441975\n", - "Surface training t=10238, loss=0.06975742056965828\n", - "Surface training t=10239, loss=0.05205198656767607\n", - "Surface training t=10240, loss=0.05725729092955589\n", - "Surface training t=10241, loss=0.0764884278178215\n", - "Surface training t=10242, loss=0.05112484470009804\n", - "Surface training t=10243, loss=0.05060353875160217\n", - "Surface training t=10244, loss=0.07504701055586338\n", - "Surface training t=10245, loss=0.08416920155286789\n", - "Surface training t=10246, loss=0.06034698523581028\n", - "Surface training t=10247, loss=0.0864882804453373\n", - "Surface training t=10248, loss=0.057651400566101074\n", - "Surface training t=10249, loss=0.0523360799998045\n", - "Surface training t=10250, loss=0.059643425047397614\n", - "Surface training t=10251, loss=0.04274960421025753\n", - "Surface training t=10252, loss=0.05232745595276356\n", - "Surface training t=10253, loss=0.030609366483986378\n", - "Surface training t=10254, loss=0.027925084345042706\n", - "Surface training t=10255, loss=0.03684339299798012\n", - "Surface training t=10256, loss=0.034067437052726746\n", - "Surface training t=10257, loss=0.033058520406484604\n", - "Surface training t=10258, loss=0.030579732730984688\n", - "Surface training t=10259, loss=0.03044566512107849\n", - "Surface training t=10260, loss=0.024730922654271126\n", - "Surface training t=10261, loss=0.035100968554615974\n", - "Surface training t=10262, loss=0.02316869981586933\n", - "Surface training t=10263, loss=0.028907266445457935\n", - "Surface training t=10264, loss=0.02657398022711277\n", - "Surface training t=10265, loss=0.023579571396112442\n", - "Surface training t=10266, loss=0.02716299705207348\n", - "Surface training t=10267, loss=0.022095728665590286\n", - "Surface training t=10268, loss=0.025254477746784687\n", - "Surface training t=10269, loss=0.02550618350505829\n", - "Surface training t=10270, loss=0.035784730687737465\n", - "Surface training t=10271, loss=0.03809582442045212\n", - 
"Surface training t=10272, loss=0.04981626570224762\n", - "Surface training t=10273, loss=0.03942711278796196\n", - "Surface training t=10274, loss=0.03831825964152813\n", - "Surface training t=10275, loss=0.044275565072894096\n", - "Surface training t=10276, loss=0.05108219012618065\n", - "Surface training t=10277, loss=0.050042192451655865\n", - "Surface training t=10278, loss=0.09088393673300743\n", - "Surface training t=10279, loss=0.06289412453770638\n", - "Surface training t=10280, loss=0.07390052452683449\n", - "Surface training t=10281, loss=0.038265518844127655\n", - "Surface training t=10282, loss=0.06480318680405617\n", - "Surface training t=10283, loss=0.042924802750349045\n", - "Surface training t=10284, loss=0.057474978268146515\n", - "Surface training t=10285, loss=0.04242348298430443\n", - "Surface training t=10286, loss=0.07943232357501984\n", - "Surface training t=10287, loss=0.04092517774552107\n", - "Surface training t=10288, loss=0.0634765774011612\n", - "Surface training t=10289, loss=0.04998117871582508\n", - "Surface training t=10290, loss=0.061519548296928406\n", - "Surface training t=10291, loss=0.039690712466835976\n", - "Surface training t=10292, loss=0.04949751868844032\n", - "Surface training t=10293, loss=0.04341285303235054\n", - "Surface training t=10294, loss=0.04334530420601368\n", - "Surface training t=10295, loss=0.04037601873278618\n", - "Surface training t=10296, loss=0.03532003052532673\n", - "Surface training t=10297, loss=0.041481902822852135\n", - "Surface training t=10298, loss=0.040257083252072334\n", - "Surface training t=10299, loss=0.03954309970140457\n", - "Surface training t=10300, loss=0.03658858500421047\n", - "Surface training t=10301, loss=0.031401876360177994\n", - "Surface training t=10302, loss=0.030122479423880577\n", - "Surface training t=10303, loss=0.03546759206801653\n", - "Surface training t=10304, loss=0.03861340694129467\n", - "Surface training t=10305, loss=0.029197081923484802\n", - "Surface training t=10306, loss=0.023041173815727234\n", - "Surface training t=10307, loss=0.022154697217047215\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=10308, loss=0.022264442406594753\n", - "Surface training t=10309, loss=0.034806784242391586\n", - "Surface training t=10310, loss=0.029838894493877888\n", - "Surface training t=10311, loss=0.01955081894993782\n", - "Surface training t=10312, loss=0.025854877196252346\n", - "Surface training t=10313, loss=0.021305694244801998\n", - "Surface training t=10314, loss=0.019803152419626713\n", - "Surface training t=10315, loss=0.02099719177931547\n", - "Surface training t=10316, loss=0.021786095574498177\n", - "Surface training t=10317, loss=0.031986369751393795\n", - "Surface training t=10318, loss=0.02350609190762043\n", - "Surface training t=10319, loss=0.025771548971533775\n", - "Surface training t=10320, loss=0.022226085886359215\n", - "Surface training t=10321, loss=0.025358865037560463\n", - "Surface training t=10322, loss=0.03148622810840607\n", - "Surface training t=10323, loss=0.042726971209049225\n", - "Surface training t=10324, loss=0.0317647960036993\n", - "Surface training t=10325, loss=0.04019859805703163\n", - "Surface training t=10326, loss=0.041154056787490845\n", - "Surface training t=10327, loss=0.04610336758196354\n", - "Surface training t=10328, loss=0.043056756258010864\n", - "Surface training t=10329, loss=0.0365313496440649\n", - "Surface training t=10330, loss=0.04904133453965187\n", - "Surface training t=10331, 
loss=0.04008457902818918\n", - "Surface training t=10332, loss=0.039284732192754745\n", - "Surface training t=10333, loss=0.03795858100056648\n", - "Surface training t=10334, loss=0.03766319900751114\n", - "Surface training t=10335, loss=0.04027015343308449\n", - "Surface training t=10336, loss=0.03712175600230694\n", - "Surface training t=10337, loss=0.04258379898965359\n", - "Surface training t=10338, loss=0.038580965250730515\n", - "Surface training t=10339, loss=0.03690105676651001\n", - "Surface training t=10340, loss=0.042586660012602806\n", - "Surface training t=10341, loss=0.040245224721729755\n", - "Surface training t=10342, loss=0.03662216104567051\n", - "Surface training t=10343, loss=0.024936582893133163\n", - "Surface training t=10344, loss=0.02520741242915392\n", - "Surface training t=10345, loss=0.02643571887165308\n", - "Surface training t=10346, loss=0.02363696414977312\n", - "Surface training t=10347, loss=0.02485257387161255\n", - "Surface training t=10348, loss=0.02928951196372509\n", - "Surface training t=10349, loss=0.020431715063750744\n", - "Surface training t=10350, loss=0.03575311228632927\n", - "Surface training t=10351, loss=0.032945615239441395\n", - "Surface training t=10352, loss=0.04724055342376232\n", - "Surface training t=10353, loss=0.06159903481602669\n", - "Surface training t=10354, loss=0.042982058599591255\n", - "Surface training t=10355, loss=0.039262499660253525\n", - "Surface training t=10356, loss=0.0423861239105463\n", - "Surface training t=10357, loss=0.04813875071704388\n", - "Surface training t=10358, loss=0.032667020335793495\n", - "Surface training t=10359, loss=0.0668123159557581\n", - "Surface training t=10360, loss=0.06442930921912193\n", - "Surface training t=10361, loss=0.04254647810012102\n", - "Surface training t=10362, loss=0.048341650515794754\n", - "Surface training t=10363, loss=0.04355290066450834\n", - "Surface training t=10364, loss=0.06134597957134247\n", - "Surface training t=10365, loss=0.04776672273874283\n", - "Surface training t=10366, loss=0.04063998442143202\n", - "Surface training t=10367, loss=0.03762879967689514\n", - "Surface training t=10368, loss=0.036902520805597305\n", - "Surface training t=10369, loss=0.0381644032895565\n", - "Surface training t=10370, loss=0.0316401869058609\n", - "Surface training t=10371, loss=0.025287476368248463\n", - "Surface training t=10372, loss=0.02629378717392683\n", - "Surface training t=10373, loss=0.028134833090007305\n", - "Surface training t=10374, loss=0.01866371463984251\n", - "Surface training t=10375, loss=0.03391370829194784\n", - "Surface training t=10376, loss=0.0368014220148325\n", - "Surface training t=10377, loss=0.040422579273581505\n", - "Surface training t=10378, loss=0.03712454345077276\n", - "Surface training t=10379, loss=0.037484840489923954\n", - "Surface training t=10380, loss=0.032123331911861897\n", - "Surface training t=10381, loss=0.030061624944210052\n", - "Surface training t=10382, loss=0.031847625970840454\n", - "Surface training t=10383, loss=0.03589693270623684\n", - "Surface training t=10384, loss=0.03807973489165306\n", - "Surface training t=10385, loss=0.03332462254911661\n", - "Surface training t=10386, loss=0.03492157720029354\n", - "Surface training t=10387, loss=0.0414896160364151\n", - "Surface training t=10388, loss=0.047856248915195465\n", - "Surface training t=10389, loss=0.043843261897563934\n", - "Surface training t=10390, loss=0.03877134621143341\n", - "Surface training t=10391, loss=0.02910782303661108\n", - "Surface training t=10392, 
loss=0.031780194491147995\n", - "Surface training t=10393, loss=0.04119581915438175\n", - "Surface training t=10394, loss=0.038183968514204025\n", - "Surface training t=10395, loss=0.026926840655505657\n", - "Surface training t=10396, loss=0.04479452036321163\n", - "Surface training t=10397, loss=0.033798808231949806\n", - "Surface training t=10398, loss=0.05306596681475639\n", - "Surface training t=10399, loss=0.0466107502579689\n", - "Surface training t=10400, loss=0.04333961009979248\n", - "Surface training t=10401, loss=0.03755320329219103\n", - "Surface training t=10402, loss=0.04678507708013058\n", - "Surface training t=10403, loss=0.05550185963511467\n", - "Surface training t=10404, loss=0.04076077602803707\n", - "Surface training t=10405, loss=0.049694670364260674\n", - "Surface training t=10406, loss=0.044249244034290314\n", - "Surface training t=10407, loss=0.040348904207348824\n", - "Surface training t=10408, loss=0.04561097174882889\n", - "Surface training t=10409, loss=0.030331257730722427\n", - "Surface training t=10410, loss=0.032737305387854576\n", - "Surface training t=10411, loss=0.044342679902911186\n", - "Surface training t=10412, loss=0.03197923116385937\n", - "Surface training t=10413, loss=0.0323129678145051\n", - "Surface training t=10414, loss=0.037368254736065865\n", - "Surface training t=10415, loss=0.03547900728881359\n", - "Surface training t=10416, loss=0.03405768610537052\n", - "Surface training t=10417, loss=0.03793451003730297\n", - "Surface training t=10418, loss=0.02961980551481247\n", - "Surface training t=10419, loss=0.02840995229780674\n", - "Surface training t=10420, loss=0.03329671174287796\n", - "Surface training t=10421, loss=0.029960574582219124\n", - "Surface training t=10422, loss=0.02471729926764965\n", - "Surface training t=10423, loss=0.025833532214164734\n", - "Surface training t=10424, loss=0.02395201288163662\n", - "Surface training t=10425, loss=0.025886230170726776\n", - "Surface training t=10426, loss=0.033580880612134933\n", - "Surface training t=10427, loss=0.03116112295538187\n", - "Surface training t=10428, loss=0.033088937401771545\n", - "Surface training t=10429, loss=0.03170208819210529\n", - "Surface training t=10430, loss=0.02848398219794035\n", - "Surface training t=10431, loss=0.033313912339508533\n", - "Surface training t=10432, loss=0.026817772537469864\n", - "Surface training t=10433, loss=0.03232817351818085\n", - "Surface training t=10434, loss=0.024106563068926334\n", - "Surface training t=10435, loss=0.03315241541713476\n", - "Surface training t=10436, loss=0.03597105946391821\n", - "Surface training t=10437, loss=0.028611368499696255\n", - "Surface training t=10438, loss=0.02814294584095478\n", - "Surface training t=10439, loss=0.03446531295776367\n", - "Surface training t=10440, loss=0.02653154917061329\n", - "Surface training t=10441, loss=0.037221189588308334\n", - "Surface training t=10442, loss=0.035899532958865166\n", - "Surface training t=10443, loss=0.050381433218717575\n", - "Surface training t=10444, loss=0.04819227010011673\n", - "Surface training t=10445, loss=0.049000538885593414\n", - "Surface training t=10446, loss=0.04296630993485451\n", - "Surface training t=10447, loss=0.04176160879433155\n", - "Surface training t=10448, loss=0.04179989546537399\n", - "Surface training t=10449, loss=0.06548439897596836\n", - "Surface training t=10450, loss=0.05287350341677666\n", - "Surface training t=10451, loss=0.0352574922144413\n", - "Surface training t=10452, loss=0.039752643555402756\n", - "Surface training 
t=10453, loss=0.02926633320748806\n", - "Surface training t=10454, loss=0.02643778920173645\n", - "Surface training t=10455, loss=0.03208394628018141\n", - "Surface training t=10456, loss=0.033109499141573906\n", - "Surface training t=10457, loss=0.04289858974516392\n", - "Surface training t=10458, loss=0.0380049217492342\n", - "Surface training t=10459, loss=0.04852033779025078\n", - "Surface training t=10460, loss=0.029411365278065205\n", - "Surface training t=10461, loss=0.028883269988000393\n", - "Surface training t=10462, loss=0.023066715337336063\n", - "Surface training t=10463, loss=0.02961540035903454\n", - "Surface training t=10464, loss=0.0248001953586936\n", - "Surface training t=10465, loss=0.02685715164989233\n", - "Surface training t=10466, loss=0.03490010928362608\n", - "Surface training t=10467, loss=0.0308806411921978\n", - "Surface training t=10468, loss=0.02617691084742546\n", - "Surface training t=10469, loss=0.02760316524654627\n", - "Surface training t=10470, loss=0.028142917901277542\n", - "Surface training t=10471, loss=0.03181967046111822\n", - "Surface training t=10472, loss=0.030316168442368507\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=10473, loss=0.02667766809463501\n", - "Surface training t=10474, loss=0.02154628373682499\n", - "Surface training t=10475, loss=0.023650379851460457\n", - "Surface training t=10476, loss=0.027034244500100613\n", - "Surface training t=10477, loss=0.027596281841397285\n", - "Surface training t=10478, loss=0.03154455218464136\n", - "Surface training t=10479, loss=0.04870262183248997\n", - "Surface training t=10480, loss=0.03482109121978283\n", - "Surface training t=10481, loss=0.026511293835937977\n", - "Surface training t=10482, loss=0.048429735004901886\n", - "Surface training t=10483, loss=0.03266201540827751\n", - "Surface training t=10484, loss=0.0327626820653677\n", - "Surface training t=10485, loss=0.03288419172167778\n", - "Surface training t=10486, loss=0.02870188932865858\n", - "Surface training t=10487, loss=0.03912295773625374\n", - "Surface training t=10488, loss=0.036443707533180714\n", - "Surface training t=10489, loss=0.04158764239400625\n", - "Surface training t=10490, loss=0.04852004162967205\n", - "Surface training t=10491, loss=0.04033356253057718\n", - "Surface training t=10492, loss=0.06501326896250248\n", - "Surface training t=10493, loss=0.07285935804247856\n", - "Surface training t=10494, loss=0.06226470321416855\n", - "Surface training t=10495, loss=0.05265854485332966\n", - "Surface training t=10496, loss=0.08212151005864143\n", - "Surface training t=10497, loss=0.061180323362350464\n", - "Surface training t=10498, loss=0.0840926393866539\n", - "Surface training t=10499, loss=0.05771430395543575\n", - "Surface training t=10500, loss=0.030833685770630836\n", - "Surface training t=10501, loss=0.031037844717502594\n", - "Surface training t=10502, loss=0.028105645440518856\n", - "Surface training t=10503, loss=0.029551485553383827\n", - "Surface training t=10504, loss=0.028896297328174114\n", - "Surface training t=10505, loss=0.0222398042678833\n", - "Surface training t=10506, loss=0.028602697886526585\n", - "Surface training t=10507, loss=0.024248333647847176\n", - "Surface training t=10508, loss=0.03370511904358864\n", - "Surface training t=10509, loss=0.03950043395161629\n", - "Surface training t=10510, loss=0.036582913249731064\n", - "Surface training t=10511, loss=0.04119616933166981\n", - "Surface training t=10512, loss=0.045269036665558815\n", - 
"Surface training t=10513, loss=0.046139344573020935\n", - "Surface training t=10514, loss=0.03537360206246376\n", - "Surface training t=10515, loss=0.03275374881923199\n", - "Surface training t=10516, loss=0.030099579133093357\n", - "Surface training t=10517, loss=0.029888578690588474\n", - "Surface training t=10518, loss=0.02668571937829256\n", - "Surface training t=10519, loss=0.02903863787651062\n", - "Surface training t=10520, loss=0.027890988625586033\n", - "Surface training t=10521, loss=0.029017831198871136\n", - "Surface training t=10522, loss=0.039518747478723526\n", - "Surface training t=10523, loss=0.031044079922139645\n", - "Surface training t=10524, loss=0.03953508287668228\n", - "Surface training t=10525, loss=0.03783324547111988\n", - "Surface training t=10526, loss=0.03468124195933342\n", - "Surface training t=10527, loss=0.06134353205561638\n", - "Surface training t=10528, loss=0.04366491362452507\n", - "Surface training t=10529, loss=0.029430123046040535\n", - "Surface training t=10530, loss=0.04767957702279091\n", - "Surface training t=10531, loss=0.03571981191635132\n", - "Surface training t=10532, loss=0.028589338064193726\n", - "Surface training t=10533, loss=0.03470889013260603\n", - "Surface training t=10534, loss=0.03884916566312313\n", - "Surface training t=10535, loss=0.06203284673392773\n", - "Surface training t=10536, loss=0.04863422177731991\n", - "Surface training t=10537, loss=0.04309943504631519\n", - "Surface training t=10538, loss=0.04191130958497524\n", - "Surface training t=10539, loss=0.04331866465508938\n", - "Surface training t=10540, loss=0.03861136455088854\n", - "Surface training t=10541, loss=0.0422394834458828\n", - "Surface training t=10542, loss=0.05153198540210724\n", - "Surface training t=10543, loss=0.04022700898349285\n", - "Surface training t=10544, loss=0.05552507936954498\n", - "Surface training t=10545, loss=0.035093882121145725\n", - "Surface training t=10546, loss=0.04196321591734886\n", - "Surface training t=10547, loss=0.03167292661964893\n", - "Surface training t=10548, loss=0.02422194927930832\n", - "Surface training t=10549, loss=0.026987510733306408\n", - "Surface training t=10550, loss=0.034305253997445107\n", - "Surface training t=10551, loss=0.025505672208964825\n", - "Surface training t=10552, loss=0.029612621292471886\n", - "Surface training t=10553, loss=0.023912029340863228\n", - "Surface training t=10554, loss=0.026658199727535248\n", - "Surface training t=10555, loss=0.02837461233139038\n", - "Surface training t=10556, loss=0.02411506511271\n", - "Surface training t=10557, loss=0.03186775930225849\n", - "Surface training t=10558, loss=0.02976404409855604\n", - "Surface training t=10559, loss=0.032113151624798775\n", - "Surface training t=10560, loss=0.03493677731603384\n", - "Surface training t=10561, loss=0.020406474359333515\n", - "Surface training t=10562, loss=0.019114219583570957\n", - "Surface training t=10563, loss=0.027700438164174557\n", - "Surface training t=10564, loss=0.030615882948040962\n", - "Surface training t=10565, loss=0.04279706999659538\n", - "Surface training t=10566, loss=0.05011170916259289\n", - "Surface training t=10567, loss=0.04515198152512312\n", - "Surface training t=10568, loss=0.05194425769150257\n", - "Surface training t=10569, loss=0.061072420328855515\n", - "Surface training t=10570, loss=0.0530824288725853\n", - "Surface training t=10571, loss=0.05412288382649422\n", - "Surface training t=10572, loss=0.058149661868810654\n", - "Surface training t=10573, loss=0.07063490524888039\n", 
- "Surface training t=10574, loss=0.054543815553188324\n", - "Surface training t=10575, loss=0.054346151649951935\n", - "Surface training t=10576, loss=0.07032732293009758\n", - "Surface training t=10577, loss=0.0407723356038332\n", - "Surface training t=10578, loss=0.04887453652918339\n", - "Surface training t=10579, loss=0.03949305787682533\n", - "Surface training t=10580, loss=0.04183978773653507\n", - "Surface training t=10581, loss=0.040418267250061035\n", - "Surface training t=10582, loss=0.03210793901234865\n", - "Surface training t=10583, loss=0.024333054199814796\n", - "Surface training t=10584, loss=0.032207902520895004\n", - "Surface training t=10585, loss=0.04726953059434891\n", - "Surface training t=10586, loss=0.036697544157505035\n", - "Surface training t=10587, loss=0.06203989312052727\n", - "Surface training t=10588, loss=0.02782799955457449\n", - "Surface training t=10589, loss=0.021162117831408978\n", - "Surface training t=10590, loss=0.02982545830309391\n", - "Surface training t=10591, loss=0.02090198453515768\n", - "Surface training t=10592, loss=0.025443321093916893\n", - "Surface training t=10593, loss=0.03034942038357258\n", - "Surface training t=10594, loss=0.035134030506014824\n", - "Surface training t=10595, loss=0.029423994943499565\n", - "Surface training t=10596, loss=0.03225875925272703\n", - "Surface training t=10597, loss=0.030065398663282394\n", - "Surface training t=10598, loss=0.028548619709908962\n", - "Surface training t=10599, loss=0.0331017579883337\n", - "Surface training t=10600, loss=0.032094644382596016\n", - "Surface training t=10601, loss=0.02923988178372383\n", - "Surface training t=10602, loss=0.02697175182402134\n", - "Surface training t=10603, loss=0.02375225070863962\n", - "Surface training t=10604, loss=0.03451364766806364\n", - "Surface training t=10605, loss=0.05060939863324165\n", - "Surface training t=10606, loss=0.03196030855178833\n", - "Surface training t=10607, loss=0.04487640783190727\n", - "Surface training t=10608, loss=0.04483645595610142\n", - "Surface training t=10609, loss=0.03913031332194805\n", - "Surface training t=10610, loss=0.04011109285056591\n", - "Surface training t=10611, loss=0.02879066951572895\n", - "Surface training t=10612, loss=0.03351656720042229\n", - "Surface training t=10613, loss=0.04685969464480877\n", - "Surface training t=10614, loss=0.044928304851055145\n", - "Surface training t=10615, loss=0.055711107328534126\n", - "Surface training t=10616, loss=0.03667363151907921\n", - "Surface training t=10617, loss=0.04710390791296959\n", - "Surface training t=10618, loss=0.044050028547644615\n", - "Surface training t=10619, loss=0.043288715183734894\n", - "Surface training t=10620, loss=0.049854788929224014\n", - "Surface training t=10621, loss=0.036180827766656876\n", - "Surface training t=10622, loss=0.05145513452589512\n", - "Surface training t=10623, loss=0.03700250107795\n", - "Surface training t=10624, loss=0.02766193449497223\n", - "Surface training t=10625, loss=0.05324385315179825\n", - "Surface training t=10626, loss=0.035786401480436325\n", - "Surface training t=10627, loss=0.04900624044239521\n", - "Surface training t=10628, loss=0.047600459307432175\n", - "Surface training t=10629, loss=0.04292517155408859\n", - "Surface training t=10630, loss=0.0377275999635458\n", - "Surface training t=10631, loss=0.02541211061179638\n", - "Surface training t=10632, loss=0.027124525979161263\n", - "Surface training t=10633, loss=0.03742269426584244\n", - "Surface training t=10634, loss=0.0348898945376277\n", - 
"Surface training t=10635, loss=0.02923096902668476\n", - "Surface training t=10636, loss=0.02872171625494957\n", - "Surface training t=10637, loss=0.035342889837920666\n", - "Surface training t=10638, loss=0.03296511620283127\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=10639, loss=0.03135202545672655\n", - "Surface training t=10640, loss=0.0339801786467433\n", - "Surface training t=10641, loss=0.04048769734799862\n", - "Surface training t=10642, loss=0.029940308071672916\n", - "Surface training t=10643, loss=0.02939990535378456\n", - "Surface training t=10644, loss=0.04992597550153732\n", - "Surface training t=10645, loss=0.03702995181083679\n", - "Surface training t=10646, loss=0.04777006804943085\n", - "Surface training t=10647, loss=0.037252962589263916\n", - "Surface training t=10648, loss=0.03960291109979153\n", - "Surface training t=10649, loss=0.03490830771625042\n", - "Surface training t=10650, loss=0.03681675344705582\n", - "Surface training t=10651, loss=0.030155791901051998\n", - "Surface training t=10652, loss=0.04036552272737026\n", - "Surface training t=10653, loss=0.03737049363553524\n", - "Surface training t=10654, loss=0.034311854280531406\n", - "Surface training t=10655, loss=0.038804154843091965\n", - "Surface training t=10656, loss=0.046590808779001236\n", - "Surface training t=10657, loss=0.035832544788718224\n", - "Surface training t=10658, loss=0.044175734743475914\n", - "Surface training t=10659, loss=0.031154428608715534\n", - "Surface training t=10660, loss=0.04288890026509762\n", - "Surface training t=10661, loss=0.03962626866996288\n", - "Surface training t=10662, loss=0.029372980818152428\n", - "Surface training t=10663, loss=0.02968711033463478\n", - "Surface training t=10664, loss=0.024450271390378475\n", - "Surface training t=10665, loss=0.032492514699697495\n", - "Surface training t=10666, loss=0.02317952085286379\n", - "Surface training t=10667, loss=0.03469776175916195\n", - "Surface training t=10668, loss=0.03171565476804972\n", - "Surface training t=10669, loss=0.031125047244131565\n", - "Surface training t=10670, loss=0.051489636301994324\n", - "Surface training t=10671, loss=0.042039357125759125\n", - "Surface training t=10672, loss=0.04897236451506615\n", - "Surface training t=10673, loss=0.04250265099108219\n", - "Surface training t=10674, loss=0.03919205069541931\n", - "Surface training t=10675, loss=0.03527502715587616\n", - "Surface training t=10676, loss=0.03393911197781563\n", - "Surface training t=10677, loss=0.033614382147789\n", - "Surface training t=10678, loss=0.021367358043789864\n", - "Surface training t=10679, loss=0.02760409377515316\n", - "Surface training t=10680, loss=0.044204797595739365\n", - "Surface training t=10681, loss=0.03074798360466957\n", - "Surface training t=10682, loss=0.040142931044101715\n", - "Surface training t=10683, loss=0.04531336948275566\n", - "Surface training t=10684, loss=0.053206631913781166\n", - "Surface training t=10685, loss=0.03316459618508816\n", - "Surface training t=10686, loss=0.0352976992726326\n", - "Surface training t=10687, loss=0.040114281699061394\n", - "Surface training t=10688, loss=0.04951820336282253\n", - "Surface training t=10689, loss=0.02869750652462244\n", - "Surface training t=10690, loss=0.035994757898151875\n", - "Surface training t=10691, loss=0.045843906700611115\n", - "Surface training t=10692, loss=0.038587117567658424\n", - "Surface training t=10693, loss=0.04163921996951103\n", - "Surface training t=10694, 
loss=0.039264015853405\n", - "Surface training t=10695, loss=0.02660965546965599\n", - "Surface training t=10696, loss=0.038325498811900616\n", - "Surface training t=10697, loss=0.03612651117146015\n", - "Surface training t=10698, loss=0.023622612468898296\n", - "Surface training t=10699, loss=0.031218654476106167\n", - "Surface training t=10700, loss=0.02398365270346403\n", - "Surface training t=10701, loss=0.031536039896309376\n", - "Surface training t=10702, loss=0.025497011840343475\n", - "Surface training t=10703, loss=0.03532562591135502\n", - "Surface training t=10704, loss=0.05332927033305168\n", - "Surface training t=10705, loss=0.05114104226231575\n", - "Surface training t=10706, loss=0.042087944224476814\n", - "Surface training t=10707, loss=0.040262531489133835\n", - "Surface training t=10708, loss=0.032077585346996784\n", - "Surface training t=10709, loss=0.03784048929810524\n", - "Surface training t=10710, loss=0.04256594181060791\n", - "Surface training t=10711, loss=0.036507404409348965\n", - "Surface training t=10712, loss=0.07293251901865005\n", - "Surface training t=10713, loss=0.05113727040588856\n", - "Surface training t=10714, loss=0.053180805407464504\n", - "Surface training t=10715, loss=0.06453780643641949\n", - "Surface training t=10716, loss=0.07993806898593903\n", - "Surface training t=10717, loss=0.048759251832962036\n", - "Surface training t=10718, loss=0.06291973032057285\n", - "Surface training t=10719, loss=0.084276232868433\n", - "Surface training t=10720, loss=0.06161842495203018\n", - "Surface training t=10721, loss=0.06437389925122261\n", - "Surface training t=10722, loss=0.06360597722232342\n", - "Surface training t=10723, loss=0.057475678622722626\n", - "Surface training t=10724, loss=0.0455114021897316\n", - "Surface training t=10725, loss=0.04760575294494629\n", - "Surface training t=10726, loss=0.04278850741684437\n", - "Surface training t=10727, loss=0.05603527091443539\n", - "Surface training t=10728, loss=0.06108280085027218\n", - "Surface training t=10729, loss=0.0419567059725523\n", - "Surface training t=10730, loss=0.048026200383901596\n", - "Surface training t=10731, loss=0.042280176654458046\n", - "Surface training t=10732, loss=0.04500989802181721\n", - "Surface training t=10733, loss=0.0469709113240242\n", - "Surface training t=10734, loss=0.05965266562998295\n", - "Surface training t=10735, loss=0.047141071408987045\n", - "Surface training t=10736, loss=0.040896112099289894\n", - "Surface training t=10737, loss=0.035519497469067574\n", - "Surface training t=10738, loss=0.027173236943781376\n", - "Surface training t=10739, loss=0.04328189417719841\n", - "Surface training t=10740, loss=0.02451085578650236\n", - "Surface training t=10741, loss=0.024618791416287422\n", - "Surface training t=10742, loss=0.02720135822892189\n", - "Surface training t=10743, loss=0.03090658411383629\n", - "Surface training t=10744, loss=0.029027962125837803\n", - "Surface training t=10745, loss=0.03730078227818012\n", - "Surface training t=10746, loss=0.03638355992734432\n", - "Surface training t=10747, loss=0.046225761994719505\n", - "Surface training t=10748, loss=0.05782622471451759\n", - "Surface training t=10749, loss=0.047923644073307514\n", - "Surface training t=10750, loss=0.045833111740648746\n", - "Surface training t=10751, loss=0.04338754154741764\n", - "Surface training t=10752, loss=0.044852644205093384\n", - "Surface training t=10753, loss=0.0479472316801548\n", - "Surface training t=10754, loss=0.05065562576055527\n", - "Surface training t=10755, 
loss=0.05485492944717407\n", - "Surface training t=10756, loss=0.04480411112308502\n", - "Surface training t=10757, loss=0.06136839650571346\n", - "Surface training t=10758, loss=0.04196491651237011\n", - "Surface training t=10759, loss=0.05432366952300072\n", - "Surface training t=10760, loss=0.03804968576878309\n", - "Surface training t=10761, loss=0.04943566210567951\n", - "Surface training t=10762, loss=0.041760873049497604\n", - "Surface training t=10763, loss=0.03106127493083477\n", - "Surface training t=10764, loss=0.0354672372341156\n", - "Surface training t=10765, loss=0.032546939328312874\n", - "Surface training t=10766, loss=0.030781744979321957\n", - "Surface training t=10767, loss=0.025138622149825096\n", - "Surface training t=10768, loss=0.02497603092342615\n", - "Surface training t=10769, loss=0.028889302164316177\n", - "Surface training t=10770, loss=0.03411855176091194\n", - "Surface training t=10771, loss=0.03157641086727381\n", - "Surface training t=10772, loss=0.02633505128324032\n", - "Surface training t=10773, loss=0.029445679858326912\n", - "Surface training t=10774, loss=0.04394670017063618\n", - "Surface training t=10775, loss=0.056510915979743004\n", - "Surface training t=10776, loss=0.04696076922118664\n", - "Surface training t=10777, loss=0.041503679007291794\n", - "Surface training t=10778, loss=0.03528369218111038\n", - "Surface training t=10779, loss=0.03949617221951485\n", - "Surface training t=10780, loss=0.04284738749265671\n", - "Surface training t=10781, loss=0.061343805864453316\n", - "Surface training t=10782, loss=0.04981871880590916\n", - "Surface training t=10783, loss=0.037470946088433266\n", - "Surface training t=10784, loss=0.03538359887897968\n", - "Surface training t=10785, loss=0.036222491413354874\n", - "Surface training t=10786, loss=0.041755372658371925\n", - "Surface training t=10787, loss=0.04418297205120325\n", - "Surface training t=10788, loss=0.049460720270872116\n", - "Surface training t=10789, loss=0.04273960366845131\n", - "Surface training t=10790, loss=0.058095017448067665\n", - "Surface training t=10791, loss=0.052130354568362236\n", - "Surface training t=10792, loss=0.042904166504740715\n", - "Surface training t=10793, loss=0.052305297926068306\n", - "Surface training t=10794, loss=0.040412004105746746\n", - "Surface training t=10795, loss=0.03474356606602669\n", - "Surface training t=10796, loss=0.030194020830094814\n", - "Surface training t=10797, loss=0.025345097295939922\n", - "Surface training t=10798, loss=0.026510796509683132\n", - "Surface training t=10799, loss=0.031115809455513954\n", - "Surface training t=10800, loss=0.028955568559467793\n", - "Surface training t=10801, loss=0.02691280096769333\n", - "Surface training t=10802, loss=0.020632791332900524\n", - "Surface training t=10803, loss=0.02119307406246662\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=10804, loss=0.02182989288121462\n", - "Surface training t=10805, loss=0.02125230897217989\n", - "Surface training t=10806, loss=0.02671710727736354\n", - "Surface training t=10807, loss=0.026030404958873987\n", - "Surface training t=10808, loss=0.030275175347924232\n", - "Surface training t=10809, loss=0.03423696104437113\n", - "Surface training t=10810, loss=0.031982299871742725\n", - "Surface training t=10811, loss=0.027277909219264984\n", - "Surface training t=10812, loss=0.024952255189418793\n", - "Surface training t=10813, loss=0.028680535964667797\n", - "Surface training t=10814, loss=0.018084790091961622\n", - 
"Surface training t=10815, loss=0.020466486923396587\n", - "Surface training t=10816, loss=0.022014047019183636\n", - "Surface training t=10817, loss=0.016064967960119247\n", - "Surface training t=10818, loss=0.01880833599716425\n", - "Surface training t=10819, loss=0.027507459744811058\n", - "Surface training t=10820, loss=0.02903564739972353\n", - "Surface training t=10821, loss=0.018083248287439346\n", - "Surface training t=10822, loss=0.02270052209496498\n", - "Surface training t=10823, loss=0.026085812598466873\n", - "Surface training t=10824, loss=0.026145382784307003\n", - "Surface training t=10825, loss=0.03259713388979435\n", - "Surface training t=10826, loss=0.034948620945215225\n", - "Surface training t=10827, loss=0.0316650215536356\n", - "Surface training t=10828, loss=0.03249985817819834\n", - "Surface training t=10829, loss=0.035868313163518906\n", - "Surface training t=10830, loss=0.025112698785960674\n", - "Surface training t=10831, loss=0.03109772317111492\n", - "Surface training t=10832, loss=0.02637647185474634\n", - "Surface training t=10833, loss=0.03385895676910877\n", - "Surface training t=10834, loss=0.028794080018997192\n", - "Surface training t=10835, loss=0.028488454408943653\n", - "Surface training t=10836, loss=0.02589571475982666\n", - "Surface training t=10837, loss=0.02468270342797041\n", - "Surface training t=10838, loss=0.02362192887812853\n", - "Surface training t=10839, loss=0.024331655353307724\n", - "Surface training t=10840, loss=0.02050400897860527\n", - "Surface training t=10841, loss=0.02247653156518936\n", - "Surface training t=10842, loss=0.024143471382558346\n", - "Surface training t=10843, loss=0.019671843387186527\n", - "Surface training t=10844, loss=0.026409746147692204\n", - "Surface training t=10845, loss=0.018729599192738533\n", - "Surface training t=10846, loss=0.02793724089860916\n", - "Surface training t=10847, loss=0.015902189537882805\n", - "Surface training t=10848, loss=0.03149175550788641\n", - "Surface training t=10849, loss=0.03419112414121628\n", - "Surface training t=10850, loss=0.07493579015135765\n", - "Surface training t=10851, loss=0.03778894431889057\n", - "Surface training t=10852, loss=0.04792250506579876\n", - "Surface training t=10853, loss=0.040699233300983906\n", - "Surface training t=10854, loss=0.034523578360676765\n", - "Surface training t=10855, loss=0.03580716345459223\n", - "Surface training t=10856, loss=0.031964211724698544\n", - "Surface training t=10857, loss=0.03588065318763256\n", - "Surface training t=10858, loss=0.04688720218837261\n", - "Surface training t=10859, loss=0.04282287694513798\n", - "Surface training t=10860, loss=0.034446234814822674\n", - "Surface training t=10861, loss=0.03644188493490219\n", - "Surface training t=10862, loss=0.04461202211678028\n", - "Surface training t=10863, loss=0.049566082656383514\n", - "Surface training t=10864, loss=0.06684493832290173\n", - "Surface training t=10865, loss=0.053939394652843475\n", - "Surface training t=10866, loss=0.03508090414106846\n", - "Surface training t=10867, loss=0.03748917952179909\n", - "Surface training t=10868, loss=0.03492738865315914\n", - "Surface training t=10869, loss=0.030033606104552746\n", - "Surface training t=10870, loss=0.023228895850479603\n", - "Surface training t=10871, loss=0.03226271644234657\n", - "Surface training t=10872, loss=0.02377146575599909\n", - "Surface training t=10873, loss=0.022556801326572895\n", - "Surface training t=10874, loss=0.019782712683081627\n", - "Surface training t=10875, 
loss=0.030087538994848728\n", - "Surface training t=10876, loss=0.03469127602875233\n", - "Surface training t=10877, loss=0.027045883238315582\n", - "Surface training t=10878, loss=0.022555246949195862\n", - "Surface training t=10879, loss=0.02666017133742571\n", - "Surface training t=10880, loss=0.02448971103876829\n", - "Surface training t=10881, loss=0.02864906471222639\n", - "Surface training t=10882, loss=0.03340204246342182\n", - "Surface training t=10883, loss=0.032036153599619865\n", - "Surface training t=10884, loss=0.04099954478442669\n", - "Surface training t=10885, loss=0.038793450221419334\n", - "Surface training t=10886, loss=0.03989509120583534\n", - "Surface training t=10887, loss=0.03232881147414446\n", - "Surface training t=10888, loss=0.04101690463721752\n", - "Surface training t=10889, loss=0.04260420799255371\n", - "Surface training t=10890, loss=0.04092409461736679\n", - "Surface training t=10891, loss=0.04725709743797779\n", - "Surface training t=10892, loss=0.05788172595202923\n", - "Surface training t=10893, loss=0.06484103202819824\n", - "Surface training t=10894, loss=0.05355794541537762\n", - "Surface training t=10895, loss=0.06289570964872837\n", - "Surface training t=10896, loss=0.04495074599981308\n", - "Surface training t=10897, loss=0.05400536023080349\n", - "Surface training t=10898, loss=0.05126476287841797\n", - "Surface training t=10899, loss=0.03441243339329958\n", - "Surface training t=10900, loss=0.0386420302093029\n", - "Surface training t=10901, loss=0.047665221616625786\n", - "Surface training t=10902, loss=0.04567788075655699\n", - "Surface training t=10903, loss=0.051114872097969055\n", - "Surface training t=10904, loss=0.03995370864868164\n", - "Surface training t=10905, loss=0.04316811449825764\n", - "Surface training t=10906, loss=0.03627379983663559\n", - "Surface training t=10907, loss=0.046600041911005974\n", - "Surface training t=10908, loss=0.029196422547101974\n", - "Surface training t=10909, loss=0.036748625338077545\n", - "Surface training t=10910, loss=0.030622358433902264\n", - "Surface training t=10911, loss=0.040905365720391273\n", - "Surface training t=10912, loss=0.04913830757141113\n", - "Surface training t=10913, loss=0.03931283298879862\n", - "Surface training t=10914, loss=0.05385892279446125\n", - "Surface training t=10915, loss=0.04383918642997742\n", - "Surface training t=10916, loss=0.03481395076960325\n", - "Surface training t=10917, loss=0.035897212103009224\n", - "Surface training t=10918, loss=0.03720603883266449\n", - "Surface training t=10919, loss=0.03117867186665535\n", - "Surface training t=10920, loss=0.02126583270728588\n", - "Surface training t=10921, loss=0.02060992456972599\n", - "Surface training t=10922, loss=0.026676014065742493\n", - "Surface training t=10923, loss=0.03201455157250166\n", - "Surface training t=10924, loss=0.03592053893953562\n", - "Surface training t=10925, loss=0.03377673402428627\n", - "Surface training t=10926, loss=0.041387793608009815\n", - "Surface training t=10927, loss=0.045444389805197716\n", - "Surface training t=10928, loss=0.048120640218257904\n", - "Surface training t=10929, loss=0.0388901149854064\n", - "Surface training t=10930, loss=0.03290302585810423\n", - "Surface training t=10931, loss=0.06329094246029854\n", - "Surface training t=10932, loss=0.05164899490773678\n", - "Surface training t=10933, loss=0.0524128582328558\n", - "Surface training t=10934, loss=0.05253366008400917\n", - "Surface training t=10935, loss=0.11407863348722458\n", - "Surface training t=10936, 
loss=0.059735147282481194\n", - "Surface training t=10937, loss=0.06119479238986969\n", - "Surface training t=10938, loss=0.08852678537368774\n", - "Surface training t=10939, loss=0.056577712297439575\n", - "Surface training t=10940, loss=0.054726630449295044\n", - "Surface training t=10941, loss=0.05338568612933159\n", - "Surface training t=10942, loss=0.05537521652877331\n", - "Surface training t=10943, loss=0.04852870851755142\n", - "Surface training t=10944, loss=0.05174817517399788\n", - "Surface training t=10945, loss=0.05395638011395931\n", - "Surface training t=10946, loss=0.0720602534711361\n", - "Surface training t=10947, loss=0.05013006366789341\n", - "Surface training t=10948, loss=0.06184978596866131\n", - "Surface training t=10949, loss=0.056979063898324966\n", - "Surface training t=10950, loss=0.05229860544204712\n", - "Surface training t=10951, loss=0.044095929712057114\n", - "Surface training t=10952, loss=0.04782000742852688\n", - "Surface training t=10953, loss=0.03354764170944691\n", - "Surface training t=10954, loss=0.06923217698931694\n", - "Surface training t=10955, loss=0.04503660276532173\n", - "Surface training t=10956, loss=0.03664322383701801\n", - "Surface training t=10957, loss=0.0437039639800787\n", - "Surface training t=10958, loss=0.04055158607661724\n", - "Surface training t=10959, loss=0.03954434208571911\n", - "Surface training t=10960, loss=0.03668890707194805\n", - "Surface training t=10961, loss=0.035437594167888165\n", - "Surface training t=10962, loss=0.035550051368772984\n", - "Surface training t=10963, loss=0.03373430483043194\n", - "Surface training t=10964, loss=0.03351916652172804\n", - "Surface training t=10965, loss=0.029954131692647934\n", - "Surface training t=10966, loss=0.038140833377838135\n", - "Surface training t=10967, loss=0.04954799264669418\n", - "Surface training t=10968, loss=0.04772351123392582\n", - "Surface training t=10969, loss=0.04477420449256897\n", - "Surface training t=10970, loss=0.059024104848504066\n", - "Surface training t=10971, loss=0.043410396203398705\n", - "Surface training t=10972, loss=0.04652927629649639\n", - "Surface training t=10973, loss=0.04433487728238106\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=10974, loss=0.05945497378706932\n", - "Surface training t=10975, loss=0.05200235918164253\n", - "Surface training t=10976, loss=0.05681310594081879\n", - "Surface training t=10977, loss=0.052025919780135155\n", - "Surface training t=10978, loss=0.04708333872258663\n", - "Surface training t=10979, loss=0.04061117768287659\n", - "Surface training t=10980, loss=0.03346690535545349\n", - "Surface training t=10981, loss=0.04556695744395256\n", - "Surface training t=10982, loss=0.035431066527962685\n", - "Surface training t=10983, loss=0.033076317980885506\n", - "Surface training t=10984, loss=0.04061114601790905\n", - "Surface training t=10985, loss=0.037828246131539345\n", - "Surface training t=10986, loss=0.027516289614140987\n", - "Surface training t=10987, loss=0.03378842584788799\n", - "Surface training t=10988, loss=0.0381292849779129\n", - "Surface training t=10989, loss=0.03625165857374668\n", - "Surface training t=10990, loss=0.0380301745608449\n", - "Surface training t=10991, loss=0.030732966028153896\n", - "Surface training t=10992, loss=0.02800859697163105\n", - "Surface training t=10993, loss=0.020035672932863235\n", - "Surface training t=10994, loss=0.02927867043763399\n", - "Surface training t=10995, loss=0.030204166658222675\n", - "Surface 
training t=10996, loss=0.03508669603615999\n", - "Surface training t=10997, loss=0.03449722658842802\n", - "Surface training t=10998, loss=0.0304669551551342\n", - "Surface training t=10999, loss=0.027322061359882355\n", - "Surface training t=11000, loss=0.027827569283545017\n", - "Surface training t=11001, loss=0.021743142046034336\n", - "Surface training t=11002, loss=0.03999102767556906\n", - "Surface training t=11003, loss=0.039857182651758194\n", - "Surface training t=11004, loss=0.0478806234896183\n", - "Surface training t=11005, loss=0.04306376725435257\n", - "Surface training t=11006, loss=0.03313049953430891\n", - "Surface training t=11007, loss=0.04791368544101715\n", - "Surface training t=11008, loss=0.058404870331287384\n", - "Surface training t=11009, loss=0.04683832451701164\n", - "Surface training t=11010, loss=0.03792624641209841\n", - "Surface training t=11011, loss=0.046983420848846436\n", - "Surface training t=11012, loss=0.051636140793561935\n", - "Surface training t=11013, loss=0.06530560180544853\n", - "Surface training t=11014, loss=0.04459776729345322\n", - "Surface training t=11015, loss=0.04017213173210621\n", - "Surface training t=11016, loss=0.03432430699467659\n", - "Surface training t=11017, loss=0.02797113172709942\n", - "Surface training t=11018, loss=0.02991264872252941\n", - "Surface training t=11019, loss=0.02870272286236286\n", - "Surface training t=11020, loss=0.03230737894773483\n", - "Surface training t=11021, loss=0.027703316882252693\n", - "Surface training t=11022, loss=0.026403306052088737\n", - "Surface training t=11023, loss=0.02408428303897381\n", - "Surface training t=11024, loss=0.03139473218470812\n", - "Surface training t=11025, loss=0.023748946376144886\n", - "Surface training t=11026, loss=0.03463666792958975\n", - "Surface training t=11027, loss=0.030866118147969246\n", - "Surface training t=11028, loss=0.02541498839855194\n", - "Surface training t=11029, loss=0.03161970805376768\n", - "Surface training t=11030, loss=0.031546845100820065\n", - "Surface training t=11031, loss=0.025490702129900455\n", - "Surface training t=11032, loss=0.030150829814374447\n", - "Surface training t=11033, loss=0.029916470870375633\n", - "Surface training t=11034, loss=0.026311070658266544\n", - "Surface training t=11035, loss=0.023949009366333485\n", - "Surface training t=11036, loss=0.0292681148275733\n", - "Surface training t=11037, loss=0.023838608525693417\n", - "Surface training t=11038, loss=0.03380574844777584\n", - "Surface training t=11039, loss=0.03385463822633028\n", - "Surface training t=11040, loss=0.03379620984196663\n", - "Surface training t=11041, loss=0.04074391722679138\n", - "Surface training t=11042, loss=0.04474029690027237\n", - "Surface training t=11043, loss=0.04595031030476093\n", - "Surface training t=11044, loss=0.0551317073404789\n", - "Surface training t=11045, loss=0.045329103246331215\n", - "Surface training t=11046, loss=0.049969074316322803\n", - "Surface training t=11047, loss=0.047268821857869625\n", - "Surface training t=11048, loss=0.03787730447947979\n", - "Surface training t=11049, loss=0.035931944847106934\n", - "Surface training t=11050, loss=0.03282209113240242\n", - "Surface training t=11051, loss=0.03450717218220234\n", - "Surface training t=11052, loss=0.030459990724921227\n", - "Surface training t=11053, loss=0.030009448528289795\n", - "Surface training t=11054, loss=0.020903666503727436\n", - "Surface training t=11055, loss=0.022521440871059895\n", - "Surface training t=11056, loss=0.0255051264539361\n", - 
"Surface training t=11057, loss=0.024085654877126217\n", - "Surface training t=11058, loss=0.028030390851199627\n", - "Surface training t=11059, loss=0.03166860342025757\n", - "Surface training t=11060, loss=0.03955323249101639\n", - "Surface training t=11061, loss=0.03762551583349705\n", - "Surface training t=11062, loss=0.02239538636058569\n", - "Surface training t=11063, loss=0.025066646747291088\n", - "Surface training t=11064, loss=0.031870643608272076\n", - "Surface training t=11065, loss=0.03884461522102356\n", - "Surface training t=11066, loss=0.029834166169166565\n", - "Surface training t=11067, loss=0.04401626996695995\n", - "Surface training t=11068, loss=0.038002196699380875\n", - "Surface training t=11069, loss=0.03849751316010952\n", - "Surface training t=11070, loss=0.04859440587460995\n", - "Surface training t=11071, loss=0.04518369771540165\n", - "Surface training t=11072, loss=0.041935596615076065\n", - "Surface training t=11073, loss=0.04368293005973101\n", - "Surface training t=11074, loss=0.03528903238475323\n", - "Surface training t=11075, loss=0.04118398390710354\n", - "Surface training t=11076, loss=0.03841305524110794\n", - "Surface training t=11077, loss=0.03499338775873184\n", - "Surface training t=11078, loss=0.028427118435502052\n", - "Surface training t=11079, loss=0.030446866527199745\n", - "Surface training t=11080, loss=0.02677764929831028\n", - "Surface training t=11081, loss=0.034860304556787014\n", - "Surface training t=11082, loss=0.030588660389184952\n", - "Surface training t=11083, loss=0.03109389916062355\n", - "Surface training t=11084, loss=0.03279468044638634\n", - "Surface training t=11085, loss=0.02523223776370287\n", - "Surface training t=11086, loss=0.03725864551961422\n", - "Surface training t=11087, loss=0.055247919633984566\n", - "Surface training t=11088, loss=0.0590790007263422\n", - "Surface training t=11089, loss=0.07844819873571396\n", - "Surface training t=11090, loss=0.050701867789030075\n", - "Surface training t=11091, loss=0.09167860820889473\n", - "Surface training t=11092, loss=0.07658710703253746\n", - "Surface training t=11093, loss=0.0604693628847599\n", - "Surface training t=11094, loss=0.07749302685260773\n", - "Surface training t=11095, loss=0.07521400414407253\n", - "Surface training t=11096, loss=0.05018512159585953\n", - "Surface training t=11097, loss=0.07352230697870255\n", - "Surface training t=11098, loss=0.04829910025000572\n", - "Surface training t=11099, loss=0.04891192726790905\n", - "Surface training t=11100, loss=0.029878496192395687\n", - "Surface training t=11101, loss=0.029552364721894264\n", - "Surface training t=11102, loss=0.03145590145140886\n", - "Surface training t=11103, loss=0.025606281124055386\n", - "Surface training t=11104, loss=0.030120336450636387\n", - "Surface training t=11105, loss=0.03794011101126671\n", - "Surface training t=11106, loss=0.03375796042382717\n", - "Surface training t=11107, loss=0.03619900904595852\n", - "Surface training t=11108, loss=0.03288686741143465\n", - "Surface training t=11109, loss=0.03722661919891834\n", - "Surface training t=11110, loss=0.03871374577283859\n", - "Surface training t=11111, loss=0.050140850245952606\n", - "Surface training t=11112, loss=0.041137512773275375\n", - "Surface training t=11113, loss=0.04453890398144722\n", - "Surface training t=11114, loss=0.04394189082086086\n", - "Surface training t=11115, loss=0.0555911622941494\n", - "Surface training t=11116, loss=0.04471798986196518\n", - "Surface training t=11117, loss=0.04619688726961613\n", - 
"Surface training t=11118, loss=0.03879383020102978\n", - "Surface training t=11119, loss=0.04097231291234493\n", - "Surface training t=11120, loss=0.03403526544570923\n", - "Surface training t=11121, loss=0.03104157280176878\n", - "Surface training t=11122, loss=0.032861978746950626\n", - "Surface training t=11123, loss=0.03418836370110512\n", - "Surface training t=11124, loss=0.03547750972211361\n", - "Surface training t=11125, loss=0.030378584749996662\n", - "Surface training t=11126, loss=0.02843862771987915\n", - "Surface training t=11127, loss=0.03734388668090105\n", - "Surface training t=11128, loss=0.03732013702392578\n", - "Surface training t=11129, loss=0.03904424514621496\n", - "Surface training t=11130, loss=0.04096238687634468\n", - "Surface training t=11131, loss=0.03802429139614105\n", - "Surface training t=11132, loss=0.031755231320858\n", - "Surface training t=11133, loss=0.04243353195488453\n", - "Surface training t=11134, loss=0.03971514105796814\n", - "Surface training t=11135, loss=0.04278046265244484\n", - "Surface training t=11136, loss=0.036417581140995026\n", - "Surface training t=11137, loss=0.03403211012482643\n", - "Surface training t=11138, loss=0.043289193883538246\n", - "Surface training t=11139, loss=0.036160483956336975\n", - "Surface training t=11140, loss=0.05868368037045002\n", - "Surface training t=11141, loss=0.05042780190706253\n", - "Surface training t=11142, loss=0.04256210010498762\n", - "Surface training t=11143, loss=0.042487820610404015\n", - "Surface training t=11144, loss=0.054848119616508484\n", - "Surface training t=11145, loss=0.05271989293396473\n", - "Surface training t=11146, loss=0.03656122833490372\n", - "Surface training t=11147, loss=0.04957849346101284\n", - "Surface training t=11148, loss=0.028857842087745667\n", - "Surface training t=11149, loss=0.029366870410740376\n", - "Surface training t=11150, loss=0.03626571036875248\n", - "Surface training t=11151, loss=0.027188997715711594\n", - "Surface training t=11152, loss=0.031911347061395645\n", - "Surface training t=11153, loss=0.023212797939777374\n", - "Surface training t=11154, loss=0.022446024231612682\n", - "Surface training t=11155, loss=0.02678001392632723\n", - "Surface training t=11156, loss=0.03136262111365795\n", - "Surface training t=11157, loss=0.03237648215144873\n", - "Surface training t=11158, loss=0.033757999539375305\n", - "Surface training t=11159, loss=0.03894772753119469\n", - "Surface training t=11160, loss=0.03476575389504433\n", - "Surface training t=11161, loss=0.03078055288642645\n", - "Surface training t=11162, loss=0.03175841644406319\n", - "Surface training t=11163, loss=0.031074970960617065\n", - "Surface training t=11164, loss=0.030265034176409245\n", - "Surface training t=11165, loss=0.02067168615758419\n", - "Surface training t=11166, loss=0.03480026964098215\n", - "Surface training t=11167, loss=0.03629057668149471\n", - "Surface training t=11168, loss=0.03134308662265539\n", - "Surface training t=11169, loss=0.03488091751933098\n", - "Surface training t=11170, loss=0.035907335579395294\n", - "Surface training t=11171, loss=0.03798791207373142\n", - "Surface training t=11172, loss=0.044814230874180794\n", - "Surface training t=11173, loss=0.0315037090331316\n", - "Surface training t=11174, loss=0.02307728584855795\n", - "Surface training t=11175, loss=0.023803369142115116\n", - "Surface training t=11176, loss=0.03433940280228853\n", - "Surface training t=11177, loss=0.029247989878058434\n", - "Surface training t=11178, loss=0.03356870822608471\n" - 
] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=11179, loss=0.027279495261609554\n", - "Surface training t=11180, loss=0.028030581772327423\n", - "Surface training t=11181, loss=0.028657400980591774\n", - "Surface training t=11182, loss=0.029748951084911823\n", - "Surface training t=11183, loss=0.025320883840322495\n", - "Surface training t=11184, loss=0.026166697964072227\n", - "Surface training t=11185, loss=0.0335750300437212\n", - "Surface training t=11186, loss=0.0441119447350502\n", - "Surface training t=11187, loss=0.03149466495960951\n", - "Surface training t=11188, loss=0.025491079315543175\n", - "Surface training t=11189, loss=0.038921624422073364\n", - "Surface training t=11190, loss=0.04754873551428318\n", - "Surface training t=11191, loss=0.0407584011554718\n", - "Surface training t=11192, loss=0.04826445318758488\n", - "Surface training t=11193, loss=0.0325331874191761\n", - "Surface training t=11194, loss=0.039123523980379105\n", - "Surface training t=11195, loss=0.029299885034561157\n", - "Surface training t=11196, loss=0.04828532412648201\n", - "Surface training t=11197, loss=0.058950796723365784\n", - "Surface training t=11198, loss=0.03801672346889973\n", - "Surface training t=11199, loss=0.04234074056148529\n", - "Surface training t=11200, loss=0.04204142652451992\n", - "Surface training t=11201, loss=0.035676551051437855\n", - "Surface training t=11202, loss=0.04663011617958546\n", - "Surface training t=11203, loss=0.03886612318456173\n", - "Surface training t=11204, loss=0.0365255456417799\n", - "Surface training t=11205, loss=0.044707924127578735\n", - "Surface training t=11206, loss=0.03673810698091984\n", - "Surface training t=11207, loss=0.033444540575146675\n", - "Surface training t=11208, loss=0.03689511679112911\n", - "Surface training t=11209, loss=0.05935061722993851\n", - "Surface training t=11210, loss=0.04146118275821209\n", - "Surface training t=11211, loss=0.04929587431252003\n", - "Surface training t=11212, loss=0.05064902454614639\n", - "Surface training t=11213, loss=0.041516974568367004\n", - "Surface training t=11214, loss=0.049255991354584694\n", - "Surface training t=11215, loss=0.043811606243252754\n", - "Surface training t=11216, loss=0.03947215527296066\n", - "Surface training t=11217, loss=0.036072537302970886\n", - "Surface training t=11218, loss=0.03550896421074867\n", - "Surface training t=11219, loss=0.026143977418541908\n", - "Surface training t=11220, loss=0.030080311931669712\n", - "Surface training t=11221, loss=0.02685660868883133\n", - "Surface training t=11222, loss=0.032281290739774704\n", - "Surface training t=11223, loss=0.030510805547237396\n", - "Surface training t=11224, loss=0.02799601759761572\n", - "Surface training t=11225, loss=0.024477068334817886\n", - "Surface training t=11226, loss=0.02313843183219433\n", - "Surface training t=11227, loss=0.03279389999806881\n", - "Surface training t=11228, loss=0.02918354794383049\n", - "Surface training t=11229, loss=0.047030363231897354\n", - "Surface training t=11230, loss=0.06758537329733372\n", - "Surface training t=11231, loss=0.05969914235174656\n", - "Surface training t=11232, loss=0.06634508818387985\n", - "Surface training t=11233, loss=0.11174072325229645\n", - "Surface training t=11234, loss=0.0631900392472744\n", - "Surface training t=11235, loss=0.08032135292887688\n", - "Surface training t=11236, loss=0.05230497941374779\n", - "Surface training t=11237, loss=0.04635640233755112\n", - "Surface training t=11238, 
loss=0.04554085060954094\n", - "Surface training t=11239, loss=0.04916191101074219\n", - "Surface training t=11240, loss=0.04801309481263161\n", - "Surface training t=11241, loss=0.04400135576725006\n", - "Surface training t=11242, loss=0.048577992245554924\n", - "Surface training t=11243, loss=0.04149340093135834\n", - "Surface training t=11244, loss=0.04501849692314863\n", - "Surface training t=11245, loss=0.06709949672222137\n", - "Surface training t=11246, loss=0.06781795620918274\n", - "Surface training t=11247, loss=0.03678948059678078\n", - "Surface training t=11248, loss=0.07871599495410919\n", - "Surface training t=11249, loss=0.04363572411239147\n", - "Surface training t=11250, loss=0.05564182810485363\n", - "Surface training t=11251, loss=0.044652379117906094\n", - "Surface training t=11252, loss=0.07443896681070328\n", - "Surface training t=11253, loss=0.044677263125777245\n", - "Surface training t=11254, loss=0.054074399173259735\n", - "Surface training t=11255, loss=0.036486140452325344\n", - "Surface training t=11256, loss=0.033536831848323345\n", - "Surface training t=11257, loss=0.04013626091182232\n", - "Surface training t=11258, loss=0.034396568313241005\n", - "Surface training t=11259, loss=0.028277949430048466\n", - "Surface training t=11260, loss=0.0309640783816576\n", - "Surface training t=11261, loss=0.036947691813111305\n", - "Surface training t=11262, loss=0.03506591636687517\n", - "Surface training t=11263, loss=0.036005325615406036\n", - "Surface training t=11264, loss=0.023972424678504467\n", - "Surface training t=11265, loss=0.031052427366375923\n", - "Surface training t=11266, loss=0.03284656535834074\n", - "Surface training t=11267, loss=0.06404076889157295\n", - "Surface training t=11268, loss=0.03149169683456421\n", - "Surface training t=11269, loss=0.026764744892716408\n", - "Surface training t=11270, loss=0.025411914102733135\n", - "Surface training t=11271, loss=0.0339587340131402\n", - "Surface training t=11272, loss=0.03441932890564203\n", - "Surface training t=11273, loss=0.04661421291530132\n", - "Surface training t=11274, loss=0.04266108386218548\n", - "Surface training t=11275, loss=0.04605872184038162\n", - "Surface training t=11276, loss=0.03664335235953331\n", - "Surface training t=11277, loss=0.03199778310954571\n", - "Surface training t=11278, loss=0.04184915870428085\n", - "Surface training t=11279, loss=0.03439647704362869\n", - "Surface training t=11280, loss=0.04253818467259407\n", - "Surface training t=11281, loss=0.03099072352051735\n", - "Surface training t=11282, loss=0.02704133652150631\n", - "Surface training t=11283, loss=0.026169774122536182\n", - "Surface training t=11284, loss=0.032608529552817345\n", - "Surface training t=11285, loss=0.047314777970314026\n", - "Surface training t=11286, loss=0.06177929788827896\n", - "Surface training t=11287, loss=0.04835661128163338\n", - "Surface training t=11288, loss=0.052771296352148056\n", - "Surface training t=11289, loss=0.060540974140167236\n", - "Surface training t=11290, loss=0.0884426049888134\n", - "Surface training t=11291, loss=0.05560927093029022\n", - "Surface training t=11292, loss=0.06633590720593929\n", - "Surface training t=11293, loss=0.08765051886439323\n", - "Surface training t=11294, loss=0.055047377943992615\n", - "Surface training t=11295, loss=0.06388953141868114\n", - "Surface training t=11296, loss=0.06805517338216305\n", - "Surface training t=11297, loss=0.05683361738920212\n", - "Surface training t=11298, loss=0.0633210614323616\n", - "Surface training t=11299, 
loss=0.09778452664613724\n", - "Surface training t=11300, loss=0.06065656617283821\n", - "Surface training t=11301, loss=0.07381154038012028\n", - "Surface training t=11302, loss=0.060194313526153564\n", - "Surface training t=11303, loss=0.04707789421081543\n", - "Surface training t=11304, loss=0.04721560515463352\n", - "Surface training t=11305, loss=0.041418058797717094\n", - "Surface training t=11306, loss=0.04853122681379318\n", - "Surface training t=11307, loss=0.04897921346127987\n", - "Surface training t=11308, loss=0.047785211354494095\n", - "Surface training t=11309, loss=0.044893043115735054\n", - "Surface training t=11310, loss=0.03343877848237753\n", - "Surface training t=11311, loss=0.04175255447626114\n", - "Surface training t=11312, loss=0.05877929553389549\n", - "Surface training t=11313, loss=0.07927097752690315\n", - "Surface training t=11314, loss=0.06832899898290634\n", - "Surface training t=11315, loss=0.05741462856531143\n", - "Surface training t=11316, loss=0.09328053891658783\n", - "Surface training t=11317, loss=0.05499231815338135\n", - "Surface training t=11318, loss=0.05576854385435581\n", - "Surface training t=11319, loss=0.07904863730072975\n", - "Surface training t=11320, loss=0.056181471794843674\n", - "Surface training t=11321, loss=0.07548883929848671\n", - "Surface training t=11322, loss=0.05188814178109169\n", - "Surface training t=11323, loss=0.05690220184624195\n", - "Surface training t=11324, loss=0.04888194799423218\n", - "Surface training t=11325, loss=0.039660584181547165\n", - "Surface training t=11326, loss=0.05793063901364803\n", - "Surface training t=11327, loss=0.06256066262722015\n", - "Surface training t=11328, loss=0.04962901584804058\n", - "Surface training t=11329, loss=0.06784894689917564\n", - "Surface training t=11330, loss=0.05308467335999012\n", - "Surface training t=11331, loss=0.04506018944084644\n", - "Surface training t=11332, loss=0.048604099079966545\n", - "Surface training t=11333, loss=0.04660458490252495\n", - "Surface training t=11334, loss=0.07890717685222626\n", - "Surface training t=11335, loss=0.05764361657202244\n", - "Surface training t=11336, loss=0.08261112868785858\n", - "Surface training t=11337, loss=0.0744934044778347\n", - "Surface training t=11338, loss=0.04569068364799023\n", - "Surface training t=11339, loss=0.05813376605510712\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=11340, loss=0.06256229057908058\n", - "Surface training t=11341, loss=0.04694327898323536\n", - "Surface training t=11342, loss=0.03773123025894165\n", - "Surface training t=11343, loss=0.04292144253849983\n", - "Surface training t=11344, loss=0.029991980642080307\n", - "Surface training t=11345, loss=0.037470631301403046\n", - "Surface training t=11346, loss=0.06253634765744209\n", - "Surface training t=11347, loss=0.05321370251476765\n", - "Surface training t=11348, loss=0.04693162068724632\n", - "Surface training t=11349, loss=0.05876714177429676\n", - "Surface training t=11350, loss=0.06851114518940449\n", - "Surface training t=11351, loss=0.055448682978749275\n", - "Surface training t=11352, loss=0.04551924951374531\n", - "Surface training t=11353, loss=0.06917238980531693\n", - "Surface training t=11354, loss=0.05489119328558445\n", - "Surface training t=11355, loss=0.048664407804608345\n", - "Surface training t=11356, loss=0.048825453966856\n", - "Surface training t=11357, loss=0.07866957038640976\n", - "Surface training t=11358, loss=0.05562827177345753\n", - "Surface training 
t=11359, loss=0.052012160420417786\n", - "Surface training t=11360, loss=0.07370662316679955\n", - "Surface training t=11361, loss=0.05585993081331253\n", - "Surface training t=11362, loss=0.0663751121610403\n", - "Surface training t=11363, loss=0.06248384714126587\n", - "Surface training t=11364, loss=0.039996521547436714\n", - "Surface training t=11365, loss=0.04774046875536442\n", - "Surface training t=11366, loss=0.05204523354768753\n", - "Surface training t=11367, loss=0.05591347627341747\n", - "Surface training t=11368, loss=0.05518430098891258\n", - "Surface training t=11369, loss=0.05458247289061546\n", - "Surface training t=11370, loss=0.05729864165186882\n", - "Surface training t=11371, loss=0.04424292407929897\n", - "Surface training t=11372, loss=0.052566759288311005\n", - "Surface training t=11373, loss=0.03649114724248648\n", - "Surface training t=11374, loss=0.033453334122896194\n", - "Surface training t=11375, loss=0.038289519026875496\n", - "Surface training t=11376, loss=0.028411234728991985\n", - "Surface training t=11377, loss=0.02105150744318962\n", - "Surface training t=11378, loss=0.019927309826016426\n", - "Surface training t=11379, loss=0.027145572006702423\n", - "Surface training t=11380, loss=0.029146947897970676\n", - "Surface training t=11381, loss=0.025282910093665123\n", - "Surface training t=11382, loss=0.026104153133928776\n", - "Surface training t=11383, loss=0.04017334431409836\n", - "Surface training t=11384, loss=0.02982643712311983\n", - "Surface training t=11385, loss=0.02373102307319641\n", - "Surface training t=11386, loss=0.02644356433302164\n", - "Surface training t=11387, loss=0.03328059706836939\n", - "Surface training t=11388, loss=0.03598703909665346\n", - "Surface training t=11389, loss=0.04435192979872227\n", - "Surface training t=11390, loss=0.024593709036707878\n", - "Surface training t=11391, loss=0.025304957292973995\n", - "Surface training t=11392, loss=0.03976079635322094\n", - "Surface training t=11393, loss=0.0327102430164814\n", - "Surface training t=11394, loss=0.04763777367770672\n", - "Surface training t=11395, loss=0.0444517582654953\n", - "Surface training t=11396, loss=0.03202732093632221\n", - "Surface training t=11397, loss=0.03789703082293272\n", - "Surface training t=11398, loss=0.03166765067726374\n", - "Surface training t=11399, loss=0.028791015967726707\n", - "Surface training t=11400, loss=0.03722020983695984\n", - "Surface training t=11401, loss=0.04445054940879345\n", - "Surface training t=11402, loss=0.035373494029045105\n", - "Surface training t=11403, loss=0.04556262120604515\n", - "Surface training t=11404, loss=0.0360143817961216\n", - "Surface training t=11405, loss=0.03000570647418499\n", - "Surface training t=11406, loss=0.03954851068556309\n", - "Surface training t=11407, loss=0.032313670963048935\n", - "Surface training t=11408, loss=0.034535398706793785\n", - "Surface training t=11409, loss=0.040755610913038254\n", - "Surface training t=11410, loss=0.030053893104195595\n", - "Surface training t=11411, loss=0.023476533591747284\n", - "Surface training t=11412, loss=0.024413660168647766\n", - "Surface training t=11413, loss=0.029000372625887394\n", - "Surface training t=11414, loss=0.027729563415050507\n", - "Surface training t=11415, loss=0.02788886148482561\n", - "Surface training t=11416, loss=0.027876703068614006\n", - "Surface training t=11417, loss=0.028567031025886536\n", - "Surface training t=11418, loss=0.030440744012594223\n", - "Surface training t=11419, loss=0.027196664363145828\n", - "Surface 
training t=11420, loss=0.03014492802321911\n", - "Surface training t=11421, loss=0.03337198495864868\n", - "Surface training t=11422, loss=0.02525939792394638\n", - "Surface training t=11423, loss=0.023688621819019318\n", - "Surface training t=11424, loss=0.025986148044466972\n", - "Surface training t=11425, loss=0.020750414580106735\n", - "Surface training t=11426, loss=0.026563572697341442\n", - "Surface training t=11427, loss=0.0350961284711957\n", - "Surface training t=11428, loss=0.03670506924390793\n", - "Surface training t=11429, loss=0.03437509760260582\n", - "Surface training t=11430, loss=0.04256178345531225\n", - "Surface training t=11431, loss=0.03898369334638119\n", - "Surface training t=11432, loss=0.03794073313474655\n", - "Surface training t=11433, loss=0.03614658396691084\n", - "Surface training t=11434, loss=0.04332432709634304\n", - "Surface training t=11435, loss=0.03128670807927847\n", - "Surface training t=11436, loss=0.02643904834985733\n", - "Surface training t=11437, loss=0.029639512300491333\n", - "Surface training t=11438, loss=0.025195869617164135\n", - "Surface training t=11439, loss=0.02378642838448286\n", - "Surface training t=11440, loss=0.02968513686209917\n", - "Surface training t=11441, loss=0.018902769312262535\n", - "Surface training t=11442, loss=0.023495730943977833\n", - "Surface training t=11443, loss=0.03091521468013525\n", - "Surface training t=11444, loss=0.022315435111522675\n", - "Surface training t=11445, loss=0.03286361135542393\n", - "Surface training t=11446, loss=0.03266992047429085\n", - "Surface training t=11447, loss=0.028056658804416656\n", - "Surface training t=11448, loss=0.03645182214677334\n", - "Surface training t=11449, loss=0.03195179719477892\n", - "Surface training t=11450, loss=0.031709291972219944\n", - "Surface training t=11451, loss=0.028651542961597443\n", - "Surface training t=11452, loss=0.026986148208379745\n", - "Surface training t=11453, loss=0.02905412670224905\n", - "Surface training t=11454, loss=0.029664839617908\n", - "Surface training t=11455, loss=0.023317349143326283\n", - "Surface training t=11456, loss=0.02787572704255581\n", - "Surface training t=11457, loss=0.02278158161789179\n", - "Surface training t=11458, loss=0.02206432167440653\n", - "Surface training t=11459, loss=0.030329053290188313\n", - "Surface training t=11460, loss=0.03464786987751722\n", - "Surface training t=11461, loss=0.03351390175521374\n", - "Surface training t=11462, loss=0.03562829364091158\n", - "Surface training t=11463, loss=0.045165298506617546\n", - "Surface training t=11464, loss=0.033217272721230984\n", - "Surface training t=11465, loss=0.031868916004896164\n", - "Surface training t=11466, loss=0.03502443339675665\n", - "Surface training t=11467, loss=0.03132994845509529\n", - "Surface training t=11468, loss=0.06236528977751732\n", - "Surface training t=11469, loss=0.04158896300941706\n", - "Surface training t=11470, loss=0.039212413132190704\n", - "Surface training t=11471, loss=0.03616165462881327\n", - "Surface training t=11472, loss=0.0428061056882143\n", - "Surface training t=11473, loss=0.028360892087221146\n", - "Surface training t=11474, loss=0.03128928691148758\n", - "Surface training t=11475, loss=0.02004765346646309\n", - "Surface training t=11476, loss=0.023419302888214588\n", - "Surface training t=11477, loss=0.02360957581549883\n", - "Surface training t=11478, loss=0.03260624222457409\n", - "Surface training t=11479, loss=0.02273140288889408\n", - "Surface training t=11480, loss=0.03789710812270641\n", - "Surface 
training t=11481, loss=0.028918548487126827\n", - "Surface training t=11482, loss=0.031581347808241844\n", - "Surface training t=11483, loss=0.027396906167268753\n", - "Surface training t=11484, loss=0.02698743063956499\n", - "Surface training t=11485, loss=0.029396462254226208\n", - "Surface training t=11486, loss=0.03174661286175251\n", - "Surface training t=11487, loss=0.03600987605750561\n", - "Surface training t=11488, loss=0.030288215726614\n", - "Surface training t=11489, loss=0.03378038760274649\n", - "Surface training t=11490, loss=0.021671529859304428\n", - "Surface training t=11491, loss=0.024636548943817616\n", - "Surface training t=11492, loss=0.025365865789353848\n", - "Surface training t=11493, loss=0.027309481985867023\n", - "Surface training t=11494, loss=0.026006488129496574\n", - "Surface training t=11495, loss=0.02561031747609377\n", - "Surface training t=11496, loss=0.028790229000151157\n", - "Surface training t=11497, loss=0.03649519011378288\n", - "Surface training t=11498, loss=0.032257040962576866\n", - "Surface training t=11499, loss=0.03024490736424923\n", - "Surface training t=11500, loss=0.03204156272113323\n", - "Surface training t=11501, loss=0.03214436583220959\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=11502, loss=0.03170387912541628\n", - "Surface training t=11503, loss=0.033174825832247734\n", - "Surface training t=11504, loss=0.04536721855401993\n", - "Surface training t=11505, loss=0.02996735367923975\n", - "Surface training t=11506, loss=0.030127701349556446\n", - "Surface training t=11507, loss=0.03330632112920284\n", - "Surface training t=11508, loss=0.022919845767319202\n", - "Surface training t=11509, loss=0.024318912997841835\n", - "Surface training t=11510, loss=0.024303052574396133\n", - "Surface training t=11511, loss=0.023393738083541393\n", - "Surface training t=11512, loss=0.02820400707423687\n", - "Surface training t=11513, loss=0.021071147173643112\n", - "Surface training t=11514, loss=0.025568410754203796\n", - "Surface training t=11515, loss=0.04192521050572395\n", - "Surface training t=11516, loss=0.04976092278957367\n", - "Surface training t=11517, loss=0.05072609521448612\n", - "Surface training t=11518, loss=0.04664454981684685\n", - "Surface training t=11519, loss=0.05941338464617729\n", - "Surface training t=11520, loss=0.06736944615840912\n", - "Surface training t=11521, loss=0.05368954315781593\n", - "Surface training t=11522, loss=0.05289642419666052\n", - "Surface training t=11523, loss=0.06799268163740635\n", - "Surface training t=11524, loss=0.043144697323441505\n", - "Surface training t=11525, loss=0.05788404867053032\n", - "Surface training t=11526, loss=0.04483414348214865\n", - "Surface training t=11527, loss=0.06708406470716\n", - "Surface training t=11528, loss=0.06506906822323799\n", - "Surface training t=11529, loss=0.04283582977950573\n", - "Surface training t=11530, loss=0.04111601039767265\n", - "Surface training t=11531, loss=0.04453452676534653\n", - "Surface training t=11532, loss=0.03851154074072838\n", - "Surface training t=11533, loss=0.048751574009656906\n", - "Surface training t=11534, loss=0.03486087918281555\n", - "Surface training t=11535, loss=0.04658287763595581\n", - "Surface training t=11536, loss=0.03951745666563511\n", - "Surface training t=11537, loss=0.032109225168824196\n", - "Surface training t=11538, loss=0.05288400501012802\n", - "Surface training t=11539, loss=0.03967327997088432\n", - "Surface training t=11540, 
loss=0.02690905425697565\n", - "Surface training t=11541, loss=0.030057040974497795\n", - "Surface training t=11542, loss=0.033305300399661064\n", - "Surface training t=11543, loss=0.03130077663809061\n", - "Surface training t=11544, loss=0.03529734257608652\n", - "Surface training t=11545, loss=0.025371787138283253\n", - "Surface training t=11546, loss=0.03507429827004671\n", - "Surface training t=11547, loss=0.036204611882567406\n", - "Surface training t=11548, loss=0.03682188503444195\n", - "Surface training t=11549, loss=0.026590938679873943\n", - "Surface training t=11550, loss=0.021938789635896683\n", - "Surface training t=11551, loss=0.02740021888166666\n", - "Surface training t=11552, loss=0.03787728399038315\n", - "Surface training t=11553, loss=0.02863560151308775\n", - "Surface training t=11554, loss=0.024720363318920135\n", - "Surface training t=11555, loss=0.03096193727105856\n", - "Surface training t=11556, loss=0.027121136896312237\n", - "Surface training t=11557, loss=0.028370208106935024\n", - "Surface training t=11558, loss=0.02845364809036255\n", - "Surface training t=11559, loss=0.02884555049240589\n", - "Surface training t=11560, loss=0.03340292535722256\n", - "Surface training t=11561, loss=0.04028586111962795\n", - "Surface training t=11562, loss=0.04454396106302738\n", - "Surface training t=11563, loss=0.04906126298010349\n", - "Surface training t=11564, loss=0.052984876558184624\n", - "Surface training t=11565, loss=0.051243891939520836\n", - "Surface training t=11566, loss=0.05123339220881462\n", - "Surface training t=11567, loss=0.049140069633722305\n", - "Surface training t=11568, loss=0.04827135242521763\n", - "Surface training t=11569, loss=0.04061258025467396\n", - "Surface training t=11570, loss=0.044097088277339935\n", - "Surface training t=11571, loss=0.040903232991695404\n", - "Surface training t=11572, loss=0.039380330592393875\n", - "Surface training t=11573, loss=0.04072415642440319\n", - "Surface training t=11574, loss=0.04228805750608444\n", - "Surface training t=11575, loss=0.04379066824913025\n", - "Surface training t=11576, loss=0.04994741827249527\n", - "Surface training t=11577, loss=0.06445391476154327\n", - "Surface training t=11578, loss=0.04591444320976734\n", - "Surface training t=11579, loss=0.05609169043600559\n", - "Surface training t=11580, loss=0.04500511009246111\n", - "Surface training t=11581, loss=0.07293420657515526\n", - "Surface training t=11582, loss=0.05251314211636782\n", - "Surface training t=11583, loss=0.08014829084277153\n", - "Surface training t=11584, loss=0.054596398025751114\n", - "Surface training t=11585, loss=0.0566310603171587\n", - "Surface training t=11586, loss=0.04834252968430519\n", - "Surface training t=11587, loss=0.05140722170472145\n", - "Surface training t=11588, loss=0.061918532475829124\n", - "Surface training t=11589, loss=0.045350316911935806\n", - "Surface training t=11590, loss=0.03955306112766266\n", - "Surface training t=11591, loss=0.036579618230462074\n", - "Surface training t=11592, loss=0.03780831303447485\n", - "Surface training t=11593, loss=0.04224136285483837\n", - "Surface training t=11594, loss=0.028547615744173527\n", - "Surface training t=11595, loss=0.02276198286563158\n", - "Surface training t=11596, loss=0.02239386085420847\n", - "Surface training t=11597, loss=0.02853253297507763\n", - "Surface training t=11598, loss=0.03124887775629759\n", - "Surface training t=11599, loss=0.03333522565662861\n", - "Surface training t=11600, loss=0.027466720901429653\n", - "Surface training 
t=11601, loss=0.02491567935794592\n", - "Surface training t=11602, loss=0.02408661413937807\n", - "Surface training t=11603, loss=0.03041334729641676\n", - "Surface training t=11604, loss=0.02568584866821766\n", - "Surface training t=11605, loss=0.023988324217498302\n", - "Surface training t=11606, loss=0.02678501047194004\n", - "Surface training t=11607, loss=0.045428283512592316\n", - "Surface training t=11608, loss=0.04057093895971775\n", - "Surface training t=11609, loss=0.046835070475935936\n", - "Surface training t=11610, loss=0.026525876484811306\n", - "Surface training t=11611, loss=0.025630032643675804\n", - "Surface training t=11612, loss=0.04650491289794445\n", - "Surface training t=11613, loss=0.04262649267911911\n", - "Surface training t=11614, loss=0.030212951824069023\n", - "Surface training t=11615, loss=0.028112956322729588\n", - "Surface training t=11616, loss=0.025277589447796345\n", - "Surface training t=11617, loss=0.022481445223093033\n", - "Surface training t=11618, loss=0.02985925506800413\n", - "Surface training t=11619, loss=0.031314192339777946\n", - "Surface training t=11620, loss=0.03961034119129181\n", - "Surface training t=11621, loss=0.05772283300757408\n", - "Surface training t=11622, loss=0.038216181099414825\n", - "Surface training t=11623, loss=0.03360092546790838\n", - "Surface training t=11624, loss=0.028657224029302597\n", - "Surface training t=11625, loss=0.0218088673427701\n", - "Surface training t=11626, loss=0.04541107825934887\n", - "Surface training t=11627, loss=0.02559402957558632\n", - "Surface training t=11628, loss=0.03176212124526501\n", - "Surface training t=11629, loss=0.050405802205204964\n", - "Surface training t=11630, loss=0.04523864574730396\n", - "Surface training t=11631, loss=0.044979327358305454\n", - "Surface training t=11632, loss=0.05580543167889118\n", - "Surface training t=11633, loss=0.03435731213539839\n", - "Surface training t=11634, loss=0.06205643527209759\n", - "Surface training t=11635, loss=0.0426715649664402\n", - "Surface training t=11636, loss=0.05383720062673092\n", - "Surface training t=11637, loss=0.04109368100762367\n", - "Surface training t=11638, loss=0.041859615594148636\n", - "Surface training t=11639, loss=0.0379277765750885\n", - "Surface training t=11640, loss=0.033468274399638176\n", - "Surface training t=11641, loss=0.037339527159929276\n", - "Surface training t=11642, loss=0.03869018889963627\n", - "Surface training t=11643, loss=0.03940887004137039\n", - "Surface training t=11644, loss=0.036030106246471405\n", - "Surface training t=11645, loss=0.04701951704919338\n", - "Surface training t=11646, loss=0.038424527272582054\n", - "Surface training t=11647, loss=0.03752836398780346\n", - "Surface training t=11648, loss=0.03896413929760456\n", - "Surface training t=11649, loss=0.032001250423491\n", - "Surface training t=11650, loss=0.03568375017493963\n", - "Surface training t=11651, loss=0.03907960094511509\n", - "Surface training t=11652, loss=0.03275182377547026\n", - "Surface training t=11653, loss=0.026301690377295017\n", - "Surface training t=11654, loss=0.03163823764771223\n", - "Surface training t=11655, loss=0.02901165932416916\n", - "Surface training t=11656, loss=0.02589340414851904\n", - "Surface training t=11657, loss=0.03368263226002455\n", - "Surface training t=11658, loss=0.03944507148116827\n", - "Surface training t=11659, loss=0.03993241209536791\n", - "Surface training t=11660, loss=0.04466360993683338\n", - "Surface training t=11661, loss=0.03524382226169109\n", - "Surface training 
t=11662, loss=0.03204575926065445\n", - "Surface training t=11663, loss=0.04505706578493118\n", - "Surface training t=11664, loss=0.04125002771615982\n", - "Surface training t=11665, loss=0.08167210221290588\n", - "Surface training t=11666, loss=0.058838147670030594\n", - "Surface training t=11667, loss=0.05276590958237648\n", - "Surface training t=11668, loss=0.05879668891429901\n", - "Surface training t=11669, loss=0.04493461363017559\n", - "Surface training t=11670, loss=0.0367509201169014\n", - "Surface training t=11671, loss=0.04886358045041561\n", - "Surface training t=11672, loss=0.03158768452703953\n", - "Surface training t=11673, loss=0.03268328495323658\n", - "Surface training t=11674, loss=0.03078166116029024\n", - "Surface training t=11675, loss=0.03655903600156307\n", - "Surface training t=11676, loss=0.03475003503262997\n", - "Surface training t=11677, loss=0.03704196214675903\n", - "Surface training t=11678, loss=0.03588869795203209\n", - "Surface training t=11679, loss=0.04234583117067814\n", - "Surface training t=11680, loss=0.0370864300057292\n", - "Surface training t=11681, loss=0.06291300989687443\n", - "Surface training t=11682, loss=0.04634012468159199\n", - "Surface training t=11683, loss=0.047509532421827316\n", - "Surface training t=11684, loss=0.06746160052716732\n", - "Surface training t=11685, loss=0.04715622030198574\n", - "Surface training t=11686, loss=0.04598282277584076\n", - "Surface training t=11687, loss=0.05240884702652693\n", - "Surface training t=11688, loss=0.06534083187580109\n", - "Surface training t=11689, loss=0.05284835025668144\n", - "Surface training t=11690, loss=0.05310375615954399\n", - "Surface training t=11691, loss=0.09248173609375954\n", - "Surface training t=11692, loss=0.05139552615582943\n", - "Surface training t=11693, loss=0.04832927417010069\n", - "Surface training t=11694, loss=0.07729126885533333\n", - "Surface training t=11695, loss=0.04212953895330429\n", - "Surface training t=11696, loss=0.0372253879904747\n", - "Surface training t=11697, loss=0.03148388396948576\n", - "Surface training t=11698, loss=0.038089606910943985\n", - "Surface training t=11699, loss=0.04093078523874283\n", - "Surface training t=11700, loss=0.033332811668515205\n", - "Surface training t=11701, loss=0.055019913241267204\n", - "Surface training t=11702, loss=0.04515167884528637\n", - "Surface training t=11703, loss=0.04289364442229271\n", - "Surface training t=11704, loss=0.033816336654126644\n", - "Surface training t=11705, loss=0.03223617188632488\n", - "Surface training t=11706, loss=0.04533064737915993\n", - "Surface training t=11707, loss=0.03718586079776287\n", - "Surface training t=11708, loss=0.032066699117422104\n", - "Surface training t=11709, loss=0.03946681134402752\n", - "Surface training t=11710, loss=0.04234444722533226\n", - "Surface training t=11711, loss=0.027915989980101585\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=11712, loss=0.042710280045866966\n", - "Surface training t=11713, loss=0.059450024738907814\n", - "Surface training t=11714, loss=0.03601620998233557\n", - "Surface training t=11715, loss=0.03714335151016712\n", - "Surface training t=11716, loss=0.0394566934555769\n", - "Surface training t=11717, loss=0.04432694800198078\n", - "Surface training t=11718, loss=0.04593280889093876\n", - "Surface training t=11719, loss=0.040461814031004906\n", - "Surface training t=11720, loss=0.03269823081791401\n", - "Surface training t=11721, loss=0.03867927007377148\n", - "Surface 
training t=11722, loss=0.03909553214907646\n", - "Surface training t=11723, loss=0.05760508216917515\n", - "Surface training t=11724, loss=0.04119596257805824\n", - "Surface training t=11725, loss=0.03418806288391352\n", - "Surface training t=11726, loss=0.03500501997768879\n", - "Surface training t=11727, loss=0.03264998272061348\n", - "Surface training t=11728, loss=0.038870006799697876\n", - "Surface training t=11729, loss=0.034182919189333916\n", - "Surface training t=11730, loss=0.03857816196978092\n", - "Surface training t=11731, loss=0.038939639925956726\n", - "Surface training t=11732, loss=0.0403689444065094\n", - "Surface training t=11733, loss=0.03342798538506031\n", - "Surface training t=11734, loss=0.026877908036112785\n", - "Surface training t=11735, loss=0.027024108916521072\n", - "Surface training t=11736, loss=0.03247682377696037\n", - "Surface training t=11737, loss=0.02894135657697916\n", - "Surface training t=11738, loss=0.026730922050774097\n", - "Surface training t=11739, loss=0.02813761681318283\n", - "Surface training t=11740, loss=0.028149310499429703\n", - "Surface training t=11741, loss=0.033748809248209\n", - "Surface training t=11742, loss=0.02832846064120531\n", - "Surface training t=11743, loss=0.028686609119176865\n", - "Surface training t=11744, loss=0.029118158854544163\n", - "Surface training t=11745, loss=0.03953852504491806\n", - "Surface training t=11746, loss=0.031251478008925915\n", - "Surface training t=11747, loss=0.018590277060866356\n", - "Surface training t=11748, loss=0.019722568802535534\n", - "Surface training t=11749, loss=0.02661329321563244\n", - "Surface training t=11750, loss=0.026972361840307713\n", - "Surface training t=11751, loss=0.026411771774291992\n", - "Surface training t=11752, loss=0.02000356651842594\n", - "Surface training t=11753, loss=0.0339378472417593\n", - "Surface training t=11754, loss=0.03933335281908512\n", - "Surface training t=11755, loss=0.062489401549100876\n", - "Surface training t=11756, loss=0.04872240871191025\n", - "Surface training t=11757, loss=0.043097203597426414\n", - "Surface training t=11758, loss=0.044610852375626564\n", - "Surface training t=11759, loss=0.036312706768512726\n", - "Surface training t=11760, loss=0.040036506950855255\n", - "Surface training t=11761, loss=0.039362385869026184\n", - "Surface training t=11762, loss=0.052644263952970505\n", - "Surface training t=11763, loss=0.03690907172858715\n", - "Surface training t=11764, loss=0.035646623000502586\n", - "Surface training t=11765, loss=0.028403379023075104\n", - "Surface training t=11766, loss=0.027390629053115845\n", - "Surface training t=11767, loss=0.024703106842935085\n", - "Surface training t=11768, loss=0.019745182245969772\n", - "Surface training t=11769, loss=0.023372539319097996\n", - "Surface training t=11770, loss=0.03802109882235527\n", - "Surface training t=11771, loss=0.02631764393299818\n", - "Surface training t=11772, loss=0.028489885851740837\n", - "Surface training t=11773, loss=0.032432932406663895\n", - "Surface training t=11774, loss=0.03381565771996975\n", - "Surface training t=11775, loss=0.03024531714618206\n", - "Surface training t=11776, loss=0.03918503411114216\n", - "Surface training t=11777, loss=0.04415063001215458\n", - "Surface training t=11778, loss=0.04837014153599739\n", - "Surface training t=11779, loss=0.058942120522260666\n", - "Surface training t=11780, loss=0.059942856431007385\n", - "Surface training t=11781, loss=0.050897132605314255\n", - "Surface training t=11782, loss=0.06085510551929474\n", 
- "Surface training t=11783, loss=0.09712990000844002\n", - "Surface training t=11784, loss=0.0664035715162754\n", - "Surface training t=11785, loss=0.08752421662211418\n", - "Surface training t=11786, loss=0.06673630885779858\n", - "Surface training t=11787, loss=0.05662097781896591\n", - "Surface training t=11788, loss=0.04151040967553854\n", - "Surface training t=11789, loss=0.058587074279785156\n", - "Surface training t=11790, loss=0.04146701097488403\n", - "Surface training t=11791, loss=0.04765838384628296\n", - "Surface training t=11792, loss=0.05161985941231251\n", - "Surface training t=11793, loss=0.09685283154249191\n", - "Surface training t=11794, loss=0.056884244084358215\n", - "Surface training t=11795, loss=0.07179586961865425\n", - "Surface training t=11796, loss=0.08105441369116306\n", - "Surface training t=11797, loss=0.047458354383707047\n", - "Surface training t=11798, loss=0.06173772178590298\n", - "Surface training t=11799, loss=0.0710192583501339\n", - "Surface training t=11800, loss=0.046667516231536865\n", - "Surface training t=11801, loss=0.05152638256549835\n", - "Surface training t=11802, loss=0.056810515001416206\n", - "Surface training t=11803, loss=0.05580170452594757\n", - "Surface training t=11804, loss=0.06110772117972374\n", - "Surface training t=11805, loss=0.05620020814239979\n", - "Surface training t=11806, loss=0.06958477199077606\n", - "Surface training t=11807, loss=0.04563489370048046\n", - "Surface training t=11808, loss=0.045899493619799614\n", - "Surface training t=11809, loss=0.03392665274441242\n", - "Surface training t=11810, loss=0.05768570117652416\n", - "Surface training t=11811, loss=0.04254309181123972\n", - "Surface training t=11812, loss=0.06333212926983833\n", - "Surface training t=11813, loss=0.05395970493555069\n", - "Surface training t=11814, loss=0.06233398802578449\n", - "Surface training t=11815, loss=0.04160250350832939\n", - "Surface training t=11816, loss=0.06905992701649666\n", - "Surface training t=11817, loss=0.045347314327955246\n", - "Surface training t=11818, loss=0.05846361629664898\n", - "Surface training t=11819, loss=0.03399323206394911\n", - "Surface training t=11820, loss=0.06280911341309547\n", - "Surface training t=11821, loss=0.04548719525337219\n", - "Surface training t=11822, loss=0.05163554660975933\n", - "Surface training t=11823, loss=0.036590393632650375\n", - "Surface training t=11824, loss=0.028938758186995983\n", - "Surface training t=11825, loss=0.020341564901173115\n", - "Surface training t=11826, loss=0.022313001565635204\n", - "Surface training t=11827, loss=0.02113288640975952\n", - "Surface training t=11828, loss=0.024793021380901337\n", - "Surface training t=11829, loss=0.030347603373229504\n", - "Surface training t=11830, loss=0.021046328358352184\n", - "Surface training t=11831, loss=0.02537492662668228\n", - "Surface training t=11832, loss=0.03845358267426491\n", - "Surface training t=11833, loss=0.03739157039672136\n", - "Surface training t=11834, loss=0.0382535457611084\n", - "Surface training t=11835, loss=0.0375035684555769\n", - "Surface training t=11836, loss=0.040742721408605576\n", - "Surface training t=11837, loss=0.0457520242780447\n", - "Surface training t=11838, loss=0.039035456255078316\n", - "Surface training t=11839, loss=0.031386133283376694\n", - "Surface training t=11840, loss=0.028897474519908428\n", - "Surface training t=11841, loss=0.032395849004387856\n", - "Surface training t=11842, loss=0.033233392983675\n", - "Surface training t=11843, loss=0.0368865467607975\n", - 
"Surface training t=11844, loss=0.044013443402945995\n", - "Surface training t=11845, loss=0.05152413621544838\n", - "Surface training t=11846, loss=0.03672385960817337\n", - "Surface training t=11847, loss=0.05022099427878857\n", - "Surface training t=11848, loss=0.041452035307884216\n", - "Surface training t=11849, loss=0.04211915470659733\n", - "Surface training t=11850, loss=0.03750629164278507\n", - "Surface training t=11851, loss=0.03738962858915329\n", - "Surface training t=11852, loss=0.03923156764358282\n", - "Surface training t=11853, loss=0.048410991206765175\n", - "Surface training t=11854, loss=0.05096236243844032\n", - "Surface training t=11855, loss=0.040320685133337975\n", - "Surface training t=11856, loss=0.03922831825911999\n", - "Surface training t=11857, loss=0.03616899251937866\n", - "Surface training t=11858, loss=0.039154608733952045\n", - "Surface training t=11859, loss=0.05444716103374958\n", - "Surface training t=11860, loss=0.060103313997387886\n", - "Surface training t=11861, loss=0.04100537486374378\n", - "Surface training t=11862, loss=0.039444051682949066\n", - "Surface training t=11863, loss=0.033248452469706535\n", - "Surface training t=11864, loss=0.026825702749192715\n", - "Surface training t=11865, loss=0.023562786169350147\n", - "Surface training t=11866, loss=0.023674474097788334\n", - "Surface training t=11867, loss=0.024487695656716824\n", - "Surface training t=11868, loss=0.03048374317586422\n", - "Surface training t=11869, loss=0.024282622151076794\n", - "Surface training t=11870, loss=0.01561458082869649\n", - "Surface training t=11871, loss=0.018116841092705727\n", - "Surface training t=11872, loss=0.022913829423487186\n", - "Surface training t=11873, loss=0.027563980780541897\n", - "Surface training t=11874, loss=0.024142264388501644\n", - "Surface training t=11875, loss=0.029202108271420002\n", - "Surface training t=11876, loss=0.028353051282465458\n", - "Surface training t=11877, loss=0.0272831991314888\n", - "Surface training t=11878, loss=0.028591053560376167\n", - "Surface training t=11879, loss=0.027296158485114574\n", - "Surface training t=11880, loss=0.028889921493828297\n", - "Surface training t=11881, loss=0.025995231233537197\n", - "Surface training t=11882, loss=0.026066331192851067\n", - "Surface training t=11883, loss=0.038930151611566544\n", - "Surface training t=11884, loss=0.034366197884082794\n", - "Surface training t=11885, loss=0.03795805759727955\n", - "Surface training t=11886, loss=0.03794713132083416\n", - "Surface training t=11887, loss=0.02352282963693142\n", - "Surface training t=11888, loss=0.028623802587389946\n", - "Surface training t=11889, loss=0.030482043512165546\n", - "Surface training t=11890, loss=0.028879838064312935\n", - "Surface training t=11891, loss=0.019379050470888615\n", - "Surface training t=11892, loss=0.021052013151347637\n", - "Surface training t=11893, loss=0.024618711322546005\n", - "Surface training t=11894, loss=0.02348316926509142\n", - "Surface training t=11895, loss=0.03216656669974327\n", - "Surface training t=11896, loss=0.03977838531136513\n", - "Surface training t=11897, loss=0.026810292154550552\n", - "Surface training t=11898, loss=0.03629898466169834\n", - "Surface training t=11899, loss=0.03314533643424511\n", - "Surface training t=11900, loss=0.024602089077234268\n", - "Surface training t=11901, loss=0.04170208424329758\n", - "Surface training t=11902, loss=0.035828765481710434\n", - "Surface training t=11903, loss=0.0396876223385334\n", - "Surface training t=11904, 
loss=0.041470425203442574\n", - "Surface training t=11905, loss=0.04080024175345898\n", - "Surface training t=11906, loss=0.032098148949444294\n", - "Surface training t=11907, loss=0.030728877522051334\n", - "Surface training t=11908, loss=0.025521486066281796\n", - "Surface training t=11909, loss=0.029481027275323868\n", - "Surface training t=11910, loss=0.02219635806977749\n", - "Surface training t=11911, loss=0.02666182443499565\n", - "Surface training t=11912, loss=0.0254298010841012\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=11913, loss=0.024479073472321033\n", - "Surface training t=11914, loss=0.022586770355701447\n", - "Surface training t=11915, loss=0.030523259192705154\n", - "Surface training t=11916, loss=0.023419145494699478\n", - "Surface training t=11917, loss=0.025082840584218502\n", - "Surface training t=11918, loss=0.022244537249207497\n", - "Surface training t=11919, loss=0.025134348310530186\n", - "Surface training t=11920, loss=0.025856126099824905\n", - "Surface training t=11921, loss=0.03041317407041788\n", - "Surface training t=11922, loss=0.03146866615861654\n", - "Surface training t=11923, loss=0.027355159632861614\n", - "Surface training t=11924, loss=0.034088971093297005\n", - "Surface training t=11925, loss=0.025132829323410988\n", - "Surface training t=11926, loss=0.028605466708540916\n", - "Surface training t=11927, loss=0.025487948209047318\n", - "Surface training t=11928, loss=0.030843224376440048\n", - "Surface training t=11929, loss=0.030605856329202652\n", - "Surface training t=11930, loss=0.02792974840849638\n", - "Surface training t=11931, loss=0.03469162434339523\n", - "Surface training t=11932, loss=0.027879266999661922\n", - "Surface training t=11933, loss=0.03614568430930376\n", - "Surface training t=11934, loss=0.04179645888507366\n", - "Surface training t=11935, loss=0.03864512965083122\n", - "Surface training t=11936, loss=0.04144022986292839\n", - "Surface training t=11937, loss=0.04292854480445385\n", - "Surface training t=11938, loss=0.046908484771847725\n", - "Surface training t=11939, loss=0.0390881709754467\n", - "Surface training t=11940, loss=0.04637259989976883\n", - "Surface training t=11941, loss=0.048568569123744965\n", - "Surface training t=11942, loss=0.03829941153526306\n", - "Surface training t=11943, loss=0.03281260095536709\n", - "Surface training t=11944, loss=0.04183945618569851\n", - "Surface training t=11945, loss=0.04764053784310818\n", - "Surface training t=11946, loss=0.03279973194003105\n", - "Surface training t=11947, loss=0.032145120203495026\n", - "Surface training t=11948, loss=0.04662793315947056\n", - "Surface training t=11949, loss=0.052429916337132454\n", - "Surface training t=11950, loss=0.05874049849808216\n", - "Surface training t=11951, loss=0.049514392390847206\n", - "Surface training t=11952, loss=0.04654789716005325\n", - "Surface training t=11953, loss=0.0351056344807148\n", - "Surface training t=11954, loss=0.046618254855275154\n", - "Surface training t=11955, loss=0.03512047603726387\n", - "Surface training t=11956, loss=0.04241960495710373\n", - "Surface training t=11957, loss=0.037215931341052055\n", - "Surface training t=11958, loss=0.024007082916796207\n", - "Surface training t=11959, loss=0.028022713027894497\n", - "Surface training t=11960, loss=0.02483697608113289\n", - "Surface training t=11961, loss=0.02411500457674265\n", - "Surface training t=11962, loss=0.01712929643690586\n", - "Surface training t=11963, loss=0.027640314772725105\n", - 
"Surface training t=11964, loss=0.026752455160021782\n", - "Surface training t=11965, loss=0.023896521888673306\n", - "Surface training t=11966, loss=0.0211500721052289\n", - "Surface training t=11967, loss=0.024755201302468777\n", - "Surface training t=11968, loss=0.035270567052066326\n", - "Surface training t=11969, loss=0.026086894795298576\n", - "Surface training t=11970, loss=0.031643849797546864\n", - "Surface training t=11971, loss=0.0259221363812685\n", - "Surface training t=11972, loss=0.020506305620074272\n", - "Surface training t=11973, loss=0.027631686069071293\n", - "Surface training t=11974, loss=0.027916555292904377\n", - "Surface training t=11975, loss=0.024425984360277653\n", - "Surface training t=11976, loss=0.031197327189147472\n", - "Surface training t=11977, loss=0.037420500069856644\n", - "Surface training t=11978, loss=0.029176395386457443\n", - "Surface training t=11979, loss=0.02432045992463827\n", - "Surface training t=11980, loss=0.028457464650273323\n", - "Surface training t=11981, loss=0.04230962507426739\n", - "Surface training t=11982, loss=0.03060702420771122\n", - "Surface training t=11983, loss=0.041616976261138916\n", - "Surface training t=11984, loss=0.02928481064736843\n", - "Surface training t=11985, loss=0.027092655189335346\n", - "Surface training t=11986, loss=0.023578638210892677\n", - "Surface training t=11987, loss=0.02600237727165222\n", - "Surface training t=11988, loss=0.02549467608332634\n", - "Surface training t=11989, loss=0.023537653498351574\n", - "Surface training t=11990, loss=0.035148163326084614\n", - "Surface training t=11991, loss=0.025218719616532326\n", - "Surface training t=11992, loss=0.02755807712674141\n", - "Surface training t=11993, loss=0.03553532250225544\n", - "Surface training t=11994, loss=0.026119237765669823\n", - "Surface training t=11995, loss=0.03161574900150299\n", - "Surface training t=11996, loss=0.024956670589745045\n", - "Surface training t=11997, loss=0.042147206142544746\n", - "Surface training t=11998, loss=0.039715973660349846\n", - "Surface training t=11999, loss=0.059074776247143745\n", - "Surface training t=12000, loss=0.05108816921710968\n", - "Surface training t=12001, loss=0.05090846307575703\n", - "Surface training t=12002, loss=0.050555091351270676\n", - "Surface training t=12003, loss=0.05761568807065487\n", - "Surface training t=12004, loss=0.0468670055270195\n", - "Surface training t=12005, loss=0.03844892792403698\n", - "Surface training t=12006, loss=0.03807738609611988\n", - "Surface training t=12007, loss=0.037056609988212585\n", - "Surface training t=12008, loss=0.03809552453458309\n", - "Surface training t=12009, loss=0.042950255796313286\n", - "Surface training t=12010, loss=0.04138246923685074\n", - "Surface training t=12011, loss=0.028491671197116375\n", - "Surface training t=12012, loss=0.03525695577263832\n", - "Surface training t=12013, loss=0.04803025349974632\n", - "Surface training t=12014, loss=0.04779152385890484\n", - "Surface training t=12015, loss=0.030207821168005466\n", - "Surface training t=12016, loss=0.02234556432813406\n", - "Surface training t=12017, loss=0.024154536426067352\n", - "Surface training t=12018, loss=0.022753004450351\n", - "Surface training t=12019, loss=0.01690408494323492\n", - "Surface training t=12020, loss=0.026034828275442123\n", - "Surface training t=12021, loss=0.023687495850026608\n", - "Surface training t=12022, loss=0.03549807704985142\n", - "Surface training t=12023, loss=0.0607950147241354\n", - "Surface training t=12024, 
loss=0.03371874429285526\n", - "Surface training t=12025, loss=0.020977308973670006\n", - "Surface training t=12026, loss=0.021291016601026058\n", - "Surface training t=12027, loss=0.03014685120433569\n", - "Surface training t=12028, loss=0.03134159464389086\n", - "Surface training t=12029, loss=0.037352935411036015\n", - "Surface training t=12030, loss=0.04715723730623722\n", - "Surface training t=12031, loss=0.04950089007616043\n", - "Surface training t=12032, loss=0.0419344287365675\n", - "Surface training t=12033, loss=0.036140237003564835\n", - "Surface training t=12034, loss=0.054951706901192665\n", - "Surface training t=12035, loss=0.04095725156366825\n", - "Surface training t=12036, loss=0.048979196697473526\n", - "Surface training t=12037, loss=0.041022032499313354\n", - "Surface training t=12038, loss=0.05887085758149624\n", - "Surface training t=12039, loss=0.04949949402362108\n", - "Surface training t=12040, loss=0.03584039397537708\n", - "Surface training t=12041, loss=0.0452567283064127\n", - "Surface training t=12042, loss=0.03475983440876007\n", - "Surface training t=12043, loss=0.04447989445179701\n", - "Surface training t=12044, loss=0.03410761058330536\n", - "Surface training t=12045, loss=0.024056714959442616\n", - "Surface training t=12046, loss=0.021940256468951702\n", - "Surface training t=12047, loss=0.026437247171998024\n", - "Surface training t=12048, loss=0.017051638569682837\n", - "Surface training t=12049, loss=0.022690112702548504\n", - "Surface training t=12050, loss=0.02474101260304451\n", - "Surface training t=12051, loss=0.024713944643735886\n", - "Surface training t=12052, loss=0.032602181658148766\n", - "Surface training t=12053, loss=0.035515302792191505\n", - "Surface training t=12054, loss=0.04431632161140442\n", - "Surface training t=12055, loss=0.04266455955803394\n", - "Surface training t=12056, loss=0.02977015357464552\n", - "Surface training t=12057, loss=0.03403080254793167\n", - "Surface training t=12058, loss=0.061470191925764084\n", - "Surface training t=12059, loss=0.038329510018229485\n", - "Surface training t=12060, loss=0.030999379232525826\n", - "Surface training t=12061, loss=0.02349769789725542\n", - "Surface training t=12062, loss=0.026818678714334965\n", - "Surface training t=12063, loss=0.025723902508616447\n", - "Surface training t=12064, loss=0.024934529326856136\n", - "Surface training t=12065, loss=0.029877033084630966\n", - "Surface training t=12066, loss=0.021634208969771862\n", - "Surface training t=12067, loss=0.030387088656425476\n", - "Surface training t=12068, loss=0.037674494087696075\n", - "Surface training t=12069, loss=0.03150435257703066\n", - "Surface training t=12070, loss=0.03391789086163044\n", - "Surface training t=12071, loss=0.0344823244959116\n", - "Surface training t=12072, loss=0.03706696908921003\n", - "Surface training t=12073, loss=0.03834998421370983\n", - "Surface training t=12074, loss=0.03658059611916542\n", - "Surface training t=12075, loss=0.036243059672415257\n", - "Surface training t=12076, loss=0.03493156284093857\n", - "Surface training t=12077, loss=0.03059332352131605\n", - "Surface training t=12078, loss=0.04676233232021332\n", - "Surface training t=12079, loss=0.060154855251312256\n", - "Surface training t=12080, loss=0.044316615909338\n", - "Surface training t=12081, loss=0.04939717426896095\n", - "Surface training t=12082, loss=0.03716549649834633\n", - "Surface training t=12083, loss=0.03715802635997534\n", - "Surface training t=12084, loss=0.046179864555597305\n", - "Surface training 
t=12085, loss=0.034822018817067146\n", - "Surface training t=12086, loss=0.03576590120792389\n", - "Surface training t=12087, loss=0.040404120460152626\n", - "Surface training t=12088, loss=0.04283313266932964\n", - "Surface training t=12089, loss=0.037737900391221046\n", - "Surface training t=12090, loss=0.040186962112784386\n", - "Surface training t=12091, loss=0.03640513867139816\n", - "Surface training t=12092, loss=0.033597249537706375\n", - "Surface training t=12093, loss=0.043120430782437325\n", - "Surface training t=12094, loss=0.028166884556412697\n", - "Surface training t=12095, loss=0.027841320261359215\n", - "Surface training t=12096, loss=0.03567872568964958\n", - "Surface training t=12097, loss=0.03438556380569935\n", - "Surface training t=12098, loss=0.037495002150535583\n", - "Surface training t=12099, loss=0.0344496900215745\n", - "Surface training t=12100, loss=0.0361768938601017\n", - "Surface training t=12101, loss=0.035073256120085716\n", - "Surface training t=12102, loss=0.031712282449007034\n", - "Surface training t=12103, loss=0.02435292210429907\n", - "Surface training t=12104, loss=0.026026220060884953\n", - "Surface training t=12105, loss=0.02788361720740795\n", - "Surface training t=12106, loss=0.04203793406486511\n", - "Surface training t=12107, loss=0.031435176730155945\n", - "Surface training t=12108, loss=0.049941105768084526\n", - "Surface training t=12109, loss=0.03512087091803551\n", - "Surface training t=12110, loss=0.04106440022587776\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=12111, loss=0.037007471546530724\n", - "Surface training t=12112, loss=0.032898133620619774\n", - "Surface training t=12113, loss=0.040839679539203644\n", - "Surface training t=12114, loss=0.03845112584531307\n", - "Surface training t=12115, loss=0.041061434894800186\n", - "Surface training t=12116, loss=0.04091397114098072\n", - "Surface training t=12117, loss=0.053684353828430176\n", - "Surface training t=12118, loss=0.048611389473080635\n", - "Surface training t=12119, loss=0.04987514764070511\n", - "Surface training t=12120, loss=0.04526483453810215\n", - "Surface training t=12121, loss=0.04402361065149307\n", - "Surface training t=12122, loss=0.0339730279520154\n", - "Surface training t=12123, loss=0.02605330105870962\n", - "Surface training t=12124, loss=0.031103357672691345\n", - "Surface training t=12125, loss=0.03080659918487072\n", - "Surface training t=12126, loss=0.024456007406115532\n", - "Surface training t=12127, loss=0.024383360520005226\n", - "Surface training t=12128, loss=0.025348429568111897\n", - "Surface training t=12129, loss=0.024710568599402905\n", - "Surface training t=12130, loss=0.024061291478574276\n", - "Surface training t=12131, loss=0.02610040921717882\n", - "Surface training t=12132, loss=0.019958512857556343\n", - "Surface training t=12133, loss=0.022709772922098637\n", - "Surface training t=12134, loss=0.023640542291104794\n", - "Surface training t=12135, loss=0.02725403942167759\n", - "Surface training t=12136, loss=0.03382685873657465\n", - "Surface training t=12137, loss=0.03909685276448727\n", - "Surface training t=12138, loss=0.030661435797810555\n", - "Surface training t=12139, loss=0.027002183720469475\n", - "Surface training t=12140, loss=0.03322327136993408\n", - "Surface training t=12141, loss=0.035092856734991074\n", - "Surface training t=12142, loss=0.029008034616708755\n", - "Surface training t=12143, loss=0.03499991074204445\n", - "Surface training t=12144, 
loss=0.03383883275091648\n", - "Surface training t=12145, loss=0.050344113260507584\n", - "Surface training t=12146, loss=0.059904562309384346\n", - "Surface training t=12147, loss=0.047729870304465294\n", - "Surface training t=12148, loss=0.04066145420074463\n", - "Surface training t=12149, loss=0.04011754225939512\n", - "Surface training t=12150, loss=0.07016756013035774\n", - "Surface training t=12151, loss=0.04739968851208687\n", - "Surface training t=12152, loss=0.05185139365494251\n", - "Surface training t=12153, loss=0.05929350294172764\n", - "Surface training t=12154, loss=0.07301557436585426\n", - "Surface training t=12155, loss=0.05018150992691517\n", - "Surface training t=12156, loss=0.03761842008680105\n", - "Surface training t=12157, loss=0.03623384051024914\n", - "Surface training t=12158, loss=0.03365989029407501\n", - "Surface training t=12159, loss=0.04452211409807205\n", - "Surface training t=12160, loss=0.047641441226005554\n", - "Surface training t=12161, loss=0.041451595723629\n", - "Surface training t=12162, loss=0.04067052714526653\n", - "Surface training t=12163, loss=0.04590313881635666\n", - "Surface training t=12164, loss=0.029271936044096947\n", - "Surface training t=12165, loss=0.034286318346858025\n", - "Surface training t=12166, loss=0.033955106511712074\n", - "Surface training t=12167, loss=0.025723163969814777\n", - "Surface training t=12168, loss=0.0226756501942873\n", - "Surface training t=12169, loss=0.03193962946534157\n", - "Surface training t=12170, loss=0.033629368990659714\n", - "Surface training t=12171, loss=0.037915538996458054\n", - "Surface training t=12172, loss=0.03924525901675224\n", - "Surface training t=12173, loss=0.04264504462480545\n", - "Surface training t=12174, loss=0.039355693385005\n", - "Surface training t=12175, loss=0.04692487046122551\n", - "Surface training t=12176, loss=0.07367703691124916\n", - "Surface training t=12177, loss=0.04954355675727129\n", - "Surface training t=12178, loss=0.07912030257284641\n", - "Surface training t=12179, loss=0.07403819635510445\n", - "Surface training t=12180, loss=0.05315248668193817\n", - "Surface training t=12181, loss=0.05259174853563309\n", - "Surface training t=12182, loss=0.05815239995718002\n", - "Surface training t=12183, loss=0.043906548991799355\n", - "Surface training t=12184, loss=0.05222043301910162\n", - "Surface training t=12185, loss=0.05395296402275562\n", - "Surface training t=12186, loss=0.10239441692829132\n", - "Surface training t=12187, loss=0.061328671872615814\n", - "Surface training t=12188, loss=0.08320915326476097\n", - "Surface training t=12189, loss=0.07017089799046516\n", - "Surface training t=12190, loss=0.05635911226272583\n", - "Surface training t=12191, loss=0.057990141212940216\n", - "Surface training t=12192, loss=0.06452091038227081\n", - "Surface training t=12193, loss=0.048783911392092705\n", - "Surface training t=12194, loss=0.05460579693317413\n", - "Surface training t=12195, loss=0.07345719262957573\n", - "Surface training t=12196, loss=0.04619571752846241\n", - "Surface training t=12197, loss=0.06568756327033043\n", - "Surface training t=12198, loss=0.044536199420690536\n", - "Surface training t=12199, loss=0.038174280896782875\n", - "Surface training t=12200, loss=0.05386429280042648\n", - "Surface training t=12201, loss=0.03330687899142504\n", - "Surface training t=12202, loss=0.040168240666389465\n", - "Surface training t=12203, loss=0.03232218511402607\n", - "Surface training t=12204, loss=0.034456390887498856\n", - "Surface training t=12205, 
loss=0.034596432000398636\n", - "Surface training t=12206, loss=0.032150465063750744\n", - "Surface training t=12207, loss=0.02809292171150446\n", - "Surface training t=12208, loss=0.021999786607921124\n", - "Surface training t=12209, loss=0.02089724550023675\n", - "Surface training t=12210, loss=0.017279747873544693\n", - "Surface training t=12211, loss=0.019905495457351208\n", - "Surface training t=12212, loss=0.02249376755207777\n", - "Surface training t=12213, loss=0.029154776595532894\n", - "Surface training t=12214, loss=0.028560331091284752\n", - "Surface training t=12215, loss=0.030690803192555904\n", - "Surface training t=12216, loss=0.03330918774008751\n", - "Surface training t=12217, loss=0.04048966243863106\n", - "Surface training t=12218, loss=0.03280930407345295\n", - "Surface training t=12219, loss=0.03056272119283676\n", - "Surface training t=12220, loss=0.03706668969243765\n", - "Surface training t=12221, loss=0.02541422378271818\n", - "Surface training t=12222, loss=0.027516783215105534\n", - "Surface training t=12223, loss=0.02342742495238781\n", - "Surface training t=12224, loss=0.03442735597491264\n", - "Surface training t=12225, loss=0.0452938973903656\n", - "Surface training t=12226, loss=0.06295546889305115\n", - "Surface training t=12227, loss=0.04449807479977608\n", - "Surface training t=12228, loss=0.04747413098812103\n", - "Surface training t=12229, loss=0.07830575853586197\n", - "Surface training t=12230, loss=0.05528477858752012\n", - "Surface training t=12231, loss=0.060967886820435524\n", - "Surface training t=12232, loss=0.060821738094091415\n", - "Surface training t=12233, loss=0.04590829461812973\n", - "Surface training t=12234, loss=0.04086492769420147\n", - "Surface training t=12235, loss=0.04070317838340998\n", - "Surface training t=12236, loss=0.04327899031341076\n", - "Surface training t=12237, loss=0.03797830641269684\n", - "Surface training t=12238, loss=0.037310369312763214\n", - "Surface training t=12239, loss=0.0259110564365983\n", - "Surface training t=12240, loss=0.03298737853765488\n", - "Surface training t=12241, loss=0.03833097033202648\n", - "Surface training t=12242, loss=0.03670342545956373\n", - "Surface training t=12243, loss=0.036798639222979546\n", - "Surface training t=12244, loss=0.04000961408019066\n", - "Surface training t=12245, loss=0.04224710911512375\n", - "Surface training t=12246, loss=0.045384978875517845\n", - "Surface training t=12247, loss=0.04947434179484844\n", - "Surface training t=12248, loss=0.034140465781092644\n", - "Surface training t=12249, loss=0.04298540577292442\n", - "Surface training t=12250, loss=0.0441450159996748\n", - "Surface training t=12251, loss=0.04507638141512871\n", - "Surface training t=12252, loss=0.04726887866854668\n", - "Surface training t=12253, loss=0.04904091916978359\n", - "Surface training t=12254, loss=0.04761245474219322\n", - "Surface training t=12255, loss=0.0353304548189044\n", - "Surface training t=12256, loss=0.031332097947597504\n", - "Surface training t=12257, loss=0.030754108913242817\n", - "Surface training t=12258, loss=0.029574728570878506\n", - "Surface training t=12259, loss=0.03295351378619671\n", - "Surface training t=12260, loss=0.027574210427701473\n", - "Surface training t=12261, loss=0.02995105180889368\n", - "Surface training t=12262, loss=0.035181015729904175\n", - "Surface training t=12263, loss=0.0395625326782465\n", - "Surface training t=12264, loss=0.03515406604856253\n", - "Surface training t=12265, loss=0.03214822802692652\n", - "Surface training t=12266, 
loss=0.03897990845143795\n", - "Surface training t=12267, loss=0.060614803805947304\n", - "Surface training t=12268, loss=0.03279111161828041\n", - "Surface training t=12269, loss=0.032153707928955555\n", - "Surface training t=12270, loss=0.031688581220805645\n", - "Surface training t=12271, loss=0.03491240553557873\n", - "Surface training t=12272, loss=0.03325882367789745\n", - "Surface training t=12273, loss=0.04662672337144613\n", - "Surface training t=12274, loss=0.045000696554780006\n", - "Surface training t=12275, loss=0.04660049080848694\n", - "Surface training t=12276, loss=0.03375657834112644\n", - "Surface training t=12277, loss=0.03249399643391371\n", - "Surface training t=12278, loss=0.025863327085971832\n", - "Surface training t=12279, loss=0.025530322454869747\n", - "Surface training t=12280, loss=0.02701669931411743\n", - "Surface training t=12281, loss=0.0396725507453084\n", - "Surface training t=12282, loss=0.039444515481591225\n", - "Surface training t=12283, loss=0.0447534266859293\n", - "Surface training t=12284, loss=0.06707309186458588\n", - "Surface training t=12285, loss=0.05481950379908085\n", - "Surface training t=12286, loss=0.05433288961648941\n", - "Surface training t=12287, loss=0.061667799949645996\n", - "Surface training t=12288, loss=0.06331018172204494\n", - "Surface training t=12289, loss=0.047184910625219345\n", - "Surface training t=12290, loss=0.04370244778692722\n", - "Surface training t=12291, loss=0.047854579985141754\n", - "Surface training t=12292, loss=0.033164625987410545\n", - "Surface training t=12293, loss=0.04592656344175339\n", - "Surface training t=12294, loss=0.03629598766565323\n", - "Surface training t=12295, loss=0.0476359948515892\n", - "Surface training t=12296, loss=0.04543114826083183\n", - "Surface training t=12297, loss=0.03920242004096508\n", - "Surface training t=12298, loss=0.05390145629644394\n", - "Surface training t=12299, loss=0.04284699633717537\n", - "Surface training t=12300, loss=0.04292485862970352\n", - "Surface training t=12301, loss=0.033260936848819256\n", - "Surface training t=12302, loss=0.06619828194379807\n", - "Surface training t=12303, loss=0.03957704361528158\n", - "Surface training t=12304, loss=0.04773146100342274\n", - "Surface training t=12305, loss=0.034961861558258533\n", - "Surface training t=12306, loss=0.059373585507273674\n", - "Surface training t=12307, loss=0.04168887995183468\n", - "Surface training t=12308, loss=0.06191146932542324\n", - "Surface training t=12309, loss=0.03222079947590828\n", - "Surface training t=12310, loss=0.057285599410533905\n", - "Surface training t=12311, loss=0.03623204305768013\n", - "Surface training t=12312, loss=0.03542507067322731\n", - "Surface training t=12313, loss=0.04847865737974644\n", - "Surface training t=12314, loss=0.03427486214786768\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=12315, loss=0.0307550011202693\n", - "Surface training t=12316, loss=0.0297624496743083\n", - "Surface training t=12317, loss=0.02910312358289957\n", - "Surface training t=12318, loss=0.03625021409243345\n", - "Surface training t=12319, loss=0.028089764527976513\n", - "Surface training t=12320, loss=0.03050406277179718\n", - "Surface training t=12321, loss=0.04387149214744568\n", - "Surface training t=12322, loss=0.03292839974164963\n", - "Surface training t=12323, loss=0.04163946956396103\n", - "Surface training t=12324, loss=0.03223246615380049\n", - "Surface training t=12325, loss=0.029054217040538788\n", - "Surface training 
t=12326, loss=0.041308676823973656\n", - "Surface training t=12327, loss=0.02895650640130043\n", - "Surface training t=12328, loss=0.026150323450565338\n", - "Surface training t=12329, loss=0.032121023163199425\n", - "Surface training t=12330, loss=0.026767240837216377\n", - "Surface training t=12331, loss=0.03386533632874489\n", - "Surface training t=12332, loss=0.029870194382965565\n", - "Surface training t=12333, loss=0.03341282531619072\n", - "Surface training t=12334, loss=0.03170268144458532\n", - "Surface training t=12335, loss=0.030471906065940857\n", - "Surface training t=12336, loss=0.03328107111155987\n", - "Surface training t=12337, loss=0.024066049605607986\n", - "Surface training t=12338, loss=0.029861589893698692\n", - "Surface training t=12339, loss=0.037479473277926445\n", - "Surface training t=12340, loss=0.03648386709392071\n", - "Surface training t=12341, loss=0.041501984000205994\n", - "Surface training t=12342, loss=0.038972143083810806\n", - "Surface training t=12343, loss=0.04902007803320885\n", - "Surface training t=12344, loss=0.036414528265595436\n", - "Surface training t=12345, loss=0.02573335636407137\n", - "Surface training t=12346, loss=0.03449399396777153\n", - "Surface training t=12347, loss=0.03783908113837242\n", - "Surface training t=12348, loss=0.030347472056746483\n", - "Surface training t=12349, loss=0.04165704548358917\n", - "Surface training t=12350, loss=0.030283947475254536\n", - "Surface training t=12351, loss=0.028151477687060833\n", - "Surface training t=12352, loss=0.03033613134175539\n", - "Surface training t=12353, loss=0.025388221256434917\n", - "Surface training t=12354, loss=0.021570741198956966\n", - "Surface training t=12355, loss=0.025420840829610825\n", - "Surface training t=12356, loss=0.027328559197485447\n", - "Surface training t=12357, loss=0.029171286150813103\n", - "Surface training t=12358, loss=0.026507234200835228\n", - "Surface training t=12359, loss=0.02876550890505314\n", - "Surface training t=12360, loss=0.02527738455682993\n", - "Surface training t=12361, loss=0.054150836542248726\n", - "Surface training t=12362, loss=0.05209682136774063\n", - "Surface training t=12363, loss=0.04664291813969612\n", - "Surface training t=12364, loss=0.060673924162983894\n", - "Surface training t=12365, loss=0.0508884321898222\n", - "Surface training t=12366, loss=0.042436081916093826\n", - "Surface training t=12367, loss=0.03594537451863289\n", - "Surface training t=12368, loss=0.039263274520635605\n", - "Surface training t=12369, loss=0.0389140285551548\n", - "Surface training t=12370, loss=0.0338668180629611\n", - "Surface training t=12371, loss=0.03609903156757355\n", - "Surface training t=12372, loss=0.039681414142251015\n", - "Surface training t=12373, loss=0.03984042815864086\n", - "Surface training t=12374, loss=0.03488069958984852\n", - "Surface training t=12375, loss=0.03721787501126528\n", - "Surface training t=12376, loss=0.049157969653606415\n", - "Surface training t=12377, loss=0.06566046550869942\n", - "Surface training t=12378, loss=0.047817543148994446\n", - "Surface training t=12379, loss=0.03259846195578575\n", - "Surface training t=12380, loss=0.03434528410434723\n", - "Surface training t=12381, loss=0.058143822476267815\n", - "Surface training t=12382, loss=0.041471198201179504\n", - "Surface training t=12383, loss=0.03324577305465937\n", - "Surface training t=12384, loss=0.03375734016299248\n", - "Surface training t=12385, loss=0.02942838706076145\n", - "Surface training t=12386, loss=0.04114348068833351\n", - 
"Surface training t=12387, loss=0.04669257625937462\n", - "Surface training t=12388, loss=0.043368831276893616\n", - "Surface training t=12389, loss=0.04010738618671894\n", - "Surface training t=12390, loss=0.03111635148525238\n", - "Surface training t=12391, loss=0.042583195492625237\n", - "Surface training t=12392, loss=0.04220614396035671\n", - "Surface training t=12393, loss=0.0483523104339838\n", - "Surface training t=12394, loss=0.04000884108245373\n", - "Surface training t=12395, loss=0.037326108664274216\n", - "Surface training t=12396, loss=0.04083302430808544\n", - "Surface training t=12397, loss=0.035160805098712444\n", - "Surface training t=12398, loss=0.044528404250741005\n", - "Surface training t=12399, loss=0.05363406799733639\n", - "Surface training t=12400, loss=0.052103957161307335\n", - "Surface training t=12401, loss=0.03989861160516739\n", - "Surface training t=12402, loss=0.05139268934726715\n", - "Surface training t=12403, loss=0.038501420989632607\n", - "Surface training t=12404, loss=0.036485252901911736\n", - "Surface training t=12405, loss=0.046270204707980156\n", - "Surface training t=12406, loss=0.04334612004458904\n", - "Surface training t=12407, loss=0.038897816091775894\n", - "Surface training t=12408, loss=0.0339969415217638\n", - "Surface training t=12409, loss=0.04783933237195015\n", - "Surface training t=12410, loss=0.042502932250499725\n", - "Surface training t=12411, loss=0.049552736803889275\n", - "Surface training t=12412, loss=0.03815857321023941\n", - "Surface training t=12413, loss=0.022598404437303543\n", - "Surface training t=12414, loss=0.035013667307794094\n", - "Surface training t=12415, loss=0.0321880467236042\n", - "Surface training t=12416, loss=0.021890236996114254\n", - "Surface training t=12417, loss=0.025628372095525265\n", - "Surface training t=12418, loss=0.025558643974363804\n", - "Surface training t=12419, loss=0.022304379381239414\n", - "Surface training t=12420, loss=0.02769464999437332\n", - "Surface training t=12421, loss=0.03487162105739117\n", - "Surface training t=12422, loss=0.04258210398256779\n", - "Surface training t=12423, loss=0.04410388134419918\n", - "Surface training t=12424, loss=0.04106734599918127\n", - "Surface training t=12425, loss=0.037924304604530334\n", - "Surface training t=12426, loss=0.031661126762628555\n", - "Surface training t=12427, loss=0.0269319424405694\n", - "Surface training t=12428, loss=0.0252620130777359\n", - "Surface training t=12429, loss=0.027663614600896835\n", - "Surface training t=12430, loss=0.03306430298835039\n", - "Surface training t=12431, loss=0.038645800203084946\n", - "Surface training t=12432, loss=0.040374502539634705\n", - "Surface training t=12433, loss=0.03784971125423908\n", - "Surface training t=12434, loss=0.03613691218197346\n", - "Surface training t=12435, loss=0.04575464501976967\n", - "Surface training t=12436, loss=0.031555941328406334\n", - "Surface training t=12437, loss=0.0294362623244524\n", - "Surface training t=12438, loss=0.04308623820543289\n", - "Surface training t=12439, loss=0.03514753095805645\n", - "Surface training t=12440, loss=0.02634300384670496\n", - "Surface training t=12441, loss=0.03594284504652023\n", - "Surface training t=12442, loss=0.02193571161478758\n", - "Surface training t=12443, loss=0.022031779401004314\n", - "Surface training t=12444, loss=0.028183589689433575\n", - "Surface training t=12445, loss=0.024068632163107395\n", - "Surface training t=12446, loss=0.02180934976786375\n", - "Surface training t=12447, 
loss=0.023470106534659863\n", - "Surface training t=12448, loss=0.018896136432886124\n", - "Surface training t=12449, loss=0.02519854437559843\n", - "Surface training t=12450, loss=0.030125231482088566\n", - "Surface training t=12451, loss=0.0200284281745553\n", - "Surface training t=12452, loss=0.020025339908897877\n", - "Surface training t=12453, loss=0.023052812553942204\n", - "Surface training t=12454, loss=0.02437381912022829\n", - "Surface training t=12455, loss=0.027679042890667915\n", - "Surface training t=12456, loss=0.04177156277000904\n", - "Surface training t=12457, loss=0.032172445207834244\n", - "Surface training t=12458, loss=0.029403138905763626\n", - "Surface training t=12459, loss=0.03759072162210941\n", - "Surface training t=12460, loss=0.025828486308455467\n", - "Surface training t=12461, loss=0.02563746925443411\n", - "Surface training t=12462, loss=0.02583381999284029\n", - "Surface training t=12463, loss=0.016022298019379377\n", - "Surface training t=12464, loss=0.025163503363728523\n", - "Surface training t=12465, loss=0.02889763657003641\n", - "Surface training t=12466, loss=0.02952899131923914\n", - "Surface training t=12467, loss=0.03673544153571129\n", - "Surface training t=12468, loss=0.027346517890691757\n", - "Surface training t=12469, loss=0.028958319686353207\n", - "Surface training t=12470, loss=0.02937144972383976\n", - "Surface training t=12471, loss=0.0346825011074543\n", - "Surface training t=12472, loss=0.03155255317687988\n", - "Surface training t=12473, loss=0.0433941762894392\n", - "Surface training t=12474, loss=0.06435202993452549\n", - "Surface training t=12475, loss=0.047598617151379585\n", - "Surface training t=12476, loss=0.04141384735703468\n", - "Surface training t=12477, loss=0.037118266336619854\n", - "Surface training t=12478, loss=0.036446463316679\n", - "Surface training t=12479, loss=0.027417007833719254\n", - "Surface training t=12480, loss=0.022952372208237648\n", - "Surface training t=12481, loss=0.02932611759752035\n", - "Surface training t=12482, loss=0.027636060491204262\n", - "Surface training t=12483, loss=0.024426245130598545\n", - "Surface training t=12484, loss=0.02286856807768345\n", - "Surface training t=12485, loss=0.03129067551344633\n", - "Surface training t=12486, loss=0.03055106569081545\n", - "Surface training t=12487, loss=0.034928640350699425\n", - "Surface training t=12488, loss=0.03477586526423693\n", - "Surface training t=12489, loss=0.02739572525024414\n", - "Surface training t=12490, loss=0.031231679022312164\n", - "Surface training t=12491, loss=0.030121146701276302\n", - "Surface training t=12492, loss=0.031001383438706398\n", - "Surface training t=12493, loss=0.02997505571693182\n", - "Surface training t=12494, loss=0.02236081939190626\n", - "Surface training t=12495, loss=0.02397724613547325\n", - "Surface training t=12496, loss=0.02969940844923258\n", - "Surface training t=12497, loss=0.027963164262473583\n", - "Surface training t=12498, loss=0.028571483679115772\n", - "Surface training t=12499, loss=0.048927852883934975\n", - "Surface training t=12500, loss=0.06664520688354969\n", - "Surface training t=12501, loss=0.035515252500772476\n", - "Surface training t=12502, loss=0.045330485329031944\n", - "Surface training t=12503, loss=0.04083961062133312\n", - "Surface training t=12504, loss=0.026494687423110008\n", - "Surface training t=12505, loss=0.026822548359632492\n", - "Surface training t=12506, loss=0.021882263012230396\n", - "Surface training t=12507, loss=0.024263864383101463\n", - "Surface 
training t=12508, loss=0.05112306773662567\n", - "Surface training t=12509, loss=0.039732856675982475\n", - "Surface training t=12510, loss=0.04112856090068817\n", - "Surface training t=12511, loss=0.03204690292477608\n", - "Surface training t=12512, loss=0.034335522912442684\n", - "Surface training t=12513, loss=0.03790389187633991\n", - "Surface training t=12514, loss=0.029478294774889946\n", - "Surface training t=12515, loss=0.02931332029402256\n", - "Surface training t=12516, loss=0.03048960492014885\n", - "Surface training t=12517, loss=0.04300360754132271\n", - "Surface training t=12518, loss=0.03975447826087475\n", - "Surface training t=12519, loss=0.04911000654101372\n", - "Surface training t=12520, loss=0.039726609364151955\n", - "Surface training t=12521, loss=0.04066724330186844\n", - "Surface training t=12522, loss=0.0376146100461483\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=12523, loss=0.03374988678842783\n", - "Surface training t=12524, loss=0.040968263521790504\n", - "Surface training t=12525, loss=0.03870909661054611\n", - "Surface training t=12526, loss=0.0413967315107584\n", - "Surface training t=12527, loss=0.06196424178779125\n", - "Surface training t=12528, loss=0.04351839516311884\n", - "Surface training t=12529, loss=0.046451106667518616\n", - "Surface training t=12530, loss=0.037822870537638664\n", - "Surface training t=12531, loss=0.04206354729831219\n", - "Surface training t=12532, loss=0.036365268751978874\n", - "Surface training t=12533, loss=0.040509020909667015\n", - "Surface training t=12534, loss=0.042407382279634476\n", - "Surface training t=12535, loss=0.03541755676269531\n", - "Surface training t=12536, loss=0.03127353452146053\n", - "Surface training t=12537, loss=0.03553757630288601\n", - "Surface training t=12538, loss=0.03150209505110979\n", - "Surface training t=12539, loss=0.025195199064910412\n", - "Surface training t=12540, loss=0.029726143926382065\n", - "Surface training t=12541, loss=0.03773801028728485\n", - "Surface training t=12542, loss=0.033774154260754585\n", - "Surface training t=12543, loss=0.0371088944375515\n", - "Surface training t=12544, loss=0.0290329959243536\n", - "Surface training t=12545, loss=0.03410298749804497\n", - "Surface training t=12546, loss=0.03134855069220066\n", - "Surface training t=12547, loss=0.031023260205984116\n", - "Surface training t=12548, loss=0.03658402897417545\n", - "Surface training t=12549, loss=0.03592725656926632\n", - "Surface training t=12550, loss=0.02953312173485756\n", - "Surface training t=12551, loss=0.022582314908504486\n", - "Surface training t=12552, loss=0.022588558495044708\n", - "Surface training t=12553, loss=0.024413978680968285\n", - "Surface training t=12554, loss=0.02026695292443037\n", - "Surface training t=12555, loss=0.028112187050282955\n", - "Surface training t=12556, loss=0.029882878065109253\n", - "Surface training t=12557, loss=0.03112637158483267\n", - "Surface training t=12558, loss=0.03616599831730127\n", - "Surface training t=12559, loss=0.03361402824521065\n", - "Surface training t=12560, loss=0.0323417317122221\n", - "Surface training t=12561, loss=0.023634073790162802\n", - "Surface training t=12562, loss=0.02438289113342762\n", - "Surface training t=12563, loss=0.030083397403359413\n", - "Surface training t=12564, loss=0.022414586506783962\n", - "Surface training t=12565, loss=0.02444557100534439\n", - "Surface training t=12566, loss=0.03381575830280781\n", - "Surface training t=12567, 
loss=0.04576731659471989\n", - "Surface training t=12568, loss=0.055227797478437424\n", - "Surface training t=12569, loss=0.04534855950623751\n", - "Surface training t=12570, loss=0.03639372996985912\n", - "Surface training t=12571, loss=0.03859425708651543\n", - "Surface training t=12572, loss=0.03654628060758114\n", - "Surface training t=12573, loss=0.044207748025655746\n", - "Surface training t=12574, loss=0.0512180645018816\n", - "Surface training t=12575, loss=0.04513257555663586\n", - "Surface training t=12576, loss=0.03598496224731207\n", - "Surface training t=12577, loss=0.04245874471962452\n", - "Surface training t=12578, loss=0.042511165142059326\n", - "Surface training t=12579, loss=0.03865979425609112\n", - "Surface training t=12580, loss=0.02969285286962986\n", - "Surface training t=12581, loss=0.028835758566856384\n", - "Surface training t=12582, loss=0.04352663829922676\n", - "Surface training t=12583, loss=0.05364221706986427\n", - "Surface training t=12584, loss=0.04210119787603617\n", - "Surface training t=12585, loss=0.051009807735681534\n", - "Surface training t=12586, loss=0.03223956469446421\n", - "Surface training t=12587, loss=0.03588624205440283\n", - "Surface training t=12588, loss=0.029344000853598118\n", - "Surface training t=12589, loss=0.02428731881082058\n", - "Surface training t=12590, loss=0.022011288441717625\n", - "Surface training t=12591, loss=0.024353510700166225\n", - "Surface training t=12592, loss=0.02213140483945608\n", - "Surface training t=12593, loss=0.022972709499299526\n", - "Surface training t=12594, loss=0.024489725939929485\n", - "Surface training t=12595, loss=0.029534795321524143\n", - "Surface training t=12596, loss=0.024364352226257324\n", - "Surface training t=12597, loss=0.03166716545820236\n", - "Surface training t=12598, loss=0.030162072740495205\n", - "Surface training t=12599, loss=0.05450162850320339\n", - "Surface training t=12600, loss=0.04791868291795254\n", - "Surface training t=12601, loss=0.04394618235528469\n", - "Surface training t=12602, loss=0.0390064837411046\n", - "Surface training t=12603, loss=0.029503120109438896\n", - "Surface training t=12604, loss=0.026524067390710115\n", - "Surface training t=12605, loss=0.04482063464820385\n", - "Surface training t=12606, loss=0.05348505266010761\n", - "Surface training t=12607, loss=0.04643702507019043\n", - "Surface training t=12608, loss=0.04149938654154539\n", - "Surface training t=12609, loss=0.03652122151106596\n", - "Surface training t=12610, loss=0.03704649955034256\n", - "Surface training t=12611, loss=0.036000609397888184\n", - "Surface training t=12612, loss=0.03177305404096842\n", - "Surface training t=12613, loss=0.033635541796684265\n", - "Surface training t=12614, loss=0.030326695181429386\n", - "Surface training t=12615, loss=0.03214239701628685\n", - "Surface training t=12616, loss=0.030502045527100563\n", - "Surface training t=12617, loss=0.029486642219126225\n", - "Surface training t=12618, loss=0.02584332786500454\n", - "Surface training t=12619, loss=0.024635495617985725\n", - "Surface training t=12620, loss=0.023668345995247364\n", - "Surface training t=12621, loss=0.024934873916208744\n", - "Surface training t=12622, loss=0.020412937738001347\n", - "Surface training t=12623, loss=0.027638712897896767\n", - "Surface training t=12624, loss=0.030845319852232933\n", - "Surface training t=12625, loss=0.025496291927993298\n", - "Surface training t=12626, loss=0.024041786789894104\n", - "Surface training t=12627, loss=0.02968570403754711\n", - "Surface training 
[... deleted notebook stdout elided: repetitive "Surface training t=..., loss=..." training-loss log lines for t=12628 through t=13895, removed from the notebook's output cells ...]
loss=0.03168647829443216\n", - "Surface training t=13896, loss=0.03291793167591095\n", - "Surface training t=13897, loss=0.03260558098554611\n", - "Surface training t=13898, loss=0.026996382512152195\n", - "Surface training t=13899, loss=0.028568378649652004\n", - "Surface training t=13900, loss=0.03485804796218872\n", - "Surface training t=13901, loss=0.03323475271463394\n", - "Surface training t=13902, loss=0.03354397974908352\n", - "Surface training t=13903, loss=0.03266902267932892\n", - "Surface training t=13904, loss=0.0452854298055172\n", - "Surface training t=13905, loss=0.033993011340498924\n", - "Surface training t=13906, loss=0.03562996722757816\n", - "Surface training t=13907, loss=0.04995770752429962\n", - "Surface training t=13908, loss=0.035723255947232246\n", - "Surface training t=13909, loss=0.0412422139197588\n", - "Surface training t=13910, loss=0.038536581210792065\n", - "Surface training t=13911, loss=0.0462891086935997\n", - "Surface training t=13912, loss=0.061115844175219536\n", - "Surface training t=13913, loss=0.046859873458743095\n", - "Surface training t=13914, loss=0.057494815438985825\n", - "Surface training t=13915, loss=0.10486815497279167\n", - "Surface training t=13916, loss=0.05757317692041397\n", - "Surface training t=13917, loss=0.059166254475712776\n", - "Surface training t=13918, loss=0.05441868677735329\n", - "Surface training t=13919, loss=0.042638376355171204\n", - "Surface training t=13920, loss=0.04097859375178814\n", - "Surface training t=13921, loss=0.039321502670645714\n", - "Surface training t=13922, loss=0.0299471328034997\n", - "Surface training t=13923, loss=0.03535163588821888\n", - "Surface training t=13924, loss=0.03882916457951069\n", - "Surface training t=13925, loss=0.03385992348194122\n", - "Surface training t=13926, loss=0.033029383048415184\n", - "Surface training t=13927, loss=0.02628587931394577\n", - "Surface training t=13928, loss=0.028305859304964542\n", - "Surface training t=13929, loss=0.034717812202870846\n", - "Surface training t=13930, loss=0.03318816516548395\n", - "Surface training t=13931, loss=0.027814985252916813\n", - "Surface training t=13932, loss=0.026328569278120995\n", - "Surface training t=13933, loss=0.055738743394613266\n", - "Surface training t=13934, loss=0.04414654150605202\n", - "Surface training t=13935, loss=0.04903707280755043\n", - "Surface training t=13936, loss=0.04113193042576313\n", - "Surface training t=13937, loss=0.059512294828891754\n", - "Surface training t=13938, loss=0.03790280595421791\n", - "Surface training t=13939, loss=0.049221914261579514\n", - "Surface training t=13940, loss=0.05573011189699173\n", - "Surface training t=13941, loss=0.037261126562952995\n", - "Surface training t=13942, loss=0.044721854850649834\n", - "Surface training t=13943, loss=0.039092994295060635\n", - "Surface training t=13944, loss=0.06203925982117653\n", - "Surface training t=13945, loss=0.050775811076164246\n", - "Surface training t=13946, loss=0.04897141829133034\n", - "Surface training t=13947, loss=0.04582871124148369\n", - "Surface training t=13948, loss=0.03820059075951576\n", - "Surface training t=13949, loss=0.04057523235678673\n", - "Surface training t=13950, loss=0.04056461714208126\n", - "Surface training t=13951, loss=0.05124921724200249\n", - "Surface training t=13952, loss=0.05372558534145355\n", - "Surface training t=13953, loss=0.03926356043666601\n", - "Surface training t=13954, loss=0.04776381142437458\n", - "Surface training t=13955, loss=0.05452452972531319\n", - "Surface training 
t=13956, loss=0.04329737089574337\n", - "Surface training t=13957, loss=0.03732382971793413\n", - "Surface training t=13958, loss=0.04008391872048378\n", - "Surface training t=13959, loss=0.05152638256549835\n", - "Surface training t=13960, loss=0.03835897333920002\n", - "Surface training t=13961, loss=0.0652043242007494\n", - "Surface training t=13962, loss=0.03525172919034958\n", - "Surface training t=13963, loss=0.062271783128380775\n", - "Surface training t=13964, loss=0.03908103518188\n", - "Surface training t=13965, loss=0.041570551693439484\n", - "Surface training t=13966, loss=0.04006312135607004\n", - "Surface training t=13967, loss=0.030207660049200058\n", - "Surface training t=13968, loss=0.0440187007188797\n", - "Surface training t=13969, loss=0.04451700486242771\n", - "Surface training t=13970, loss=0.06584057584404945\n", - "Surface training t=13971, loss=0.04039647988975048\n", - "Surface training t=13972, loss=0.04368837736546993\n", - "Surface training t=13973, loss=0.03249570168554783\n", - "Surface training t=13974, loss=0.040850816294550896\n", - "Surface training t=13975, loss=0.03143572621047497\n", - "Surface training t=13976, loss=0.03328951261937618\n", - "Surface training t=13977, loss=0.03317007515579462\n", - "Surface training t=13978, loss=0.02618809975683689\n", - "Surface training t=13979, loss=0.0418217908591032\n", - "Surface training t=13980, loss=0.03700936771929264\n", - "Surface training t=13981, loss=0.03226904012262821\n", - "Surface training t=13982, loss=0.03596014529466629\n", - "Surface training t=13983, loss=0.045430270954966545\n", - "Surface training t=13984, loss=0.04362824372947216\n", - "Surface training t=13985, loss=0.038909947499632835\n", - "Surface training t=13986, loss=0.026897506788372993\n", - "Surface training t=13987, loss=0.03707763087004423\n", - "Surface training t=13988, loss=0.036192938685417175\n", - "Surface training t=13989, loss=0.03568931017071009\n", - "Surface training t=13990, loss=0.03813168592751026\n", - "Surface training t=13991, loss=0.031909133307635784\n", - "Surface training t=13992, loss=0.028609858825802803\n", - "Surface training t=13993, loss=0.030209532007575035\n", - "Surface training t=13994, loss=0.019936423748731613\n", - "Surface training t=13995, loss=0.022698037326335907\n", - "Surface training t=13996, loss=0.023371636867523193\n", - "Surface training t=13997, loss=0.02231160644441843\n", - "Surface training t=13998, loss=0.01963573321700096\n", - "Surface training t=13999, loss=0.033583417534828186\n", - "Surface training t=14000, loss=0.03069987241178751\n", - "Surface training t=14001, loss=0.024740119464695454\n", - "Surface training t=14002, loss=0.02175561711192131\n", - "Surface training t=14003, loss=0.029643571004271507\n", - "Surface training t=14004, loss=0.032085100188851357\n", - "Surface training t=14005, loss=0.04411038011312485\n", - "Surface training t=14006, loss=0.036934467032551765\n", - "Surface training t=14007, loss=0.0459744967520237\n", - "Surface training t=14008, loss=0.043235257267951965\n", - "Surface training t=14009, loss=0.0459454171359539\n", - "Surface training t=14010, loss=0.04989313706755638\n", - "Surface training t=14011, loss=0.058528393507003784\n", - "Surface training t=14012, loss=0.04586777649819851\n", - "Surface training t=14013, loss=0.048678627237677574\n", - "Surface training t=14014, loss=0.037265677005052567\n", - "Surface training t=14015, loss=0.040040286257863045\n", - "Surface training t=14016, loss=0.0308665931224823\n", - "Surface training 
t=14017, loss=0.027615398168563843\n", - "Surface training t=14018, loss=0.022743048146367073\n", - "Surface training t=14019, loss=0.02495643123984337\n", - "Surface training t=14020, loss=0.02624350506812334\n", - "Surface training t=14021, loss=0.023789101280272007\n", - "Surface training t=14022, loss=0.026327721774578094\n", - "Surface training t=14023, loss=0.03587584849447012\n", - "Surface training t=14024, loss=0.0391263123601675\n", - "Surface training t=14025, loss=0.033950189128518105\n", - "Surface training t=14026, loss=0.04593292437493801\n", - "Surface training t=14027, loss=0.03574276156723499\n", - "Surface training t=14028, loss=0.03730632737278938\n", - "Surface training t=14029, loss=0.03691742941737175\n", - "Surface training t=14030, loss=0.037794992327690125\n", - "Surface training t=14031, loss=0.02206866256892681\n", - "Surface training t=14032, loss=0.022516359575092793\n", - "Surface training t=14033, loss=0.028422742150723934\n", - "Surface training t=14034, loss=0.03044013772159815\n", - "Surface training t=14035, loss=0.03227666858583689\n", - "Surface training t=14036, loss=0.03084433823823929\n", - "Surface training t=14037, loss=0.03428014740347862\n", - "Surface training t=14038, loss=0.03684201464056969\n", - "Surface training t=14039, loss=0.03357839398086071\n", - "Surface training t=14040, loss=0.03149075619876385\n", - "Surface training t=14041, loss=0.02645300794392824\n", - "Surface training t=14042, loss=0.03354785591363907\n", - "Surface training t=14043, loss=0.036290492862463\n", - "Surface training t=14044, loss=0.028359590098261833\n", - "Surface training t=14045, loss=0.04512536898255348\n", - "Surface training t=14046, loss=0.02943381853401661\n", - "Surface training t=14047, loss=0.026087645441293716\n", - "Surface training t=14048, loss=0.026883614249527454\n", - "Surface training t=14049, loss=0.025202888995409012\n", - "Surface training t=14050, loss=0.022787603549659252\n", - "Surface training t=14051, loss=0.02184968162328005\n", - "Surface training t=14052, loss=0.021278949454426765\n", - "Surface training t=14053, loss=0.0285725686699152\n", - "Surface training t=14054, loss=0.023968116380274296\n", - "Surface training t=14055, loss=0.02491540927439928\n", - "Surface training t=14056, loss=0.03010235633701086\n", - "Surface training t=14057, loss=0.028145084157586098\n", - "Surface training t=14058, loss=0.02708241529762745\n", - "Surface training t=14059, loss=0.03249694500118494\n", - "Surface training t=14060, loss=0.027966078370809555\n", - "Surface training t=14061, loss=0.025882226414978504\n", - "Surface training t=14062, loss=0.03631077706813812\n", - "Surface training t=14063, loss=0.04206275939941406\n", - "Surface training t=14064, loss=0.03368195705115795\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=14065, loss=0.03715007472783327\n", - "Surface training t=14066, loss=0.051301661878824234\n", - "Surface training t=14067, loss=0.04401724599301815\n", - "Surface training t=14068, loss=0.04076375253498554\n", - "Surface training t=14069, loss=0.043109314516186714\n", - "Surface training t=14070, loss=0.037499270401895046\n", - "Surface training t=14071, loss=0.04016214609146118\n", - "Surface training t=14072, loss=0.04590750113129616\n", - "Surface training t=14073, loss=0.038069961592555046\n", - "Surface training t=14074, loss=0.038946542888879776\n", - "Surface training t=14075, loss=0.04146208614110947\n", - "Surface training t=14076, loss=0.028354693204164505\n", - 
"Surface training t=14077, loss=0.01925351470708847\n", - "Surface training t=14078, loss=0.02107424195855856\n", - "Surface training t=14079, loss=0.018464609049260616\n", - "Surface training t=14080, loss=0.023811811581254005\n", - "Surface training t=14081, loss=0.027906635776162148\n", - "Surface training t=14082, loss=0.03195985686033964\n", - "Surface training t=14083, loss=0.02789708785712719\n", - "Surface training t=14084, loss=0.030856279656291008\n", - "Surface training t=14085, loss=0.0251094289124012\n", - "Surface training t=14086, loss=0.026697661727666855\n", - "Surface training t=14087, loss=0.031418646685779095\n", - "Surface training t=14088, loss=0.04209708794951439\n", - "Surface training t=14089, loss=0.03416530508548021\n", - "Surface training t=14090, loss=0.03652207553386688\n", - "Surface training t=14091, loss=0.02940494194626808\n", - "Surface training t=14092, loss=0.027754864655435085\n", - "Surface training t=14093, loss=0.03165122680366039\n", - "Surface training t=14094, loss=0.024599775671958923\n", - "Surface training t=14095, loss=0.030601492151618004\n", - "Surface training t=14096, loss=0.024874483235180378\n", - "Surface training t=14097, loss=0.028951654210686684\n", - "Surface training t=14098, loss=0.02421568799763918\n", - "Surface training t=14099, loss=0.019984296523034573\n", - "Surface training t=14100, loss=0.02053492423146963\n", - "Surface training t=14101, loss=0.02611595019698143\n", - "Surface training t=14102, loss=0.026885760948061943\n", - "Surface training t=14103, loss=0.05188866704702377\n", - "Surface training t=14104, loss=0.043164629489183426\n", - "Surface training t=14105, loss=0.048489767126739025\n", - "Surface training t=14106, loss=0.05466601625084877\n", - "Surface training t=14107, loss=0.1038956418633461\n", - "Surface training t=14108, loss=0.061707235872745514\n", - "Surface training t=14109, loss=0.06075960956513882\n", - "Surface training t=14110, loss=0.06556646153330803\n", - "Surface training t=14111, loss=0.04635104164481163\n", - "Surface training t=14112, loss=0.03296418022364378\n", - "Surface training t=14113, loss=0.02756384201347828\n", - "Surface training t=14114, loss=0.026911658234894276\n", - "Surface training t=14115, loss=0.028926950879395008\n", - "Surface training t=14116, loss=0.03179418295621872\n", - "Surface training t=14117, loss=0.041769228875637054\n", - "Surface training t=14118, loss=0.054894085973501205\n", - "Surface training t=14119, loss=0.0386265330016613\n", - "Surface training t=14120, loss=0.0403472688049078\n", - "Surface training t=14121, loss=0.027336392551660538\n", - "Surface training t=14122, loss=0.036509525030851364\n", - "Surface training t=14123, loss=0.03149740491062403\n", - "Surface training t=14124, loss=0.03386266343295574\n", - "Surface training t=14125, loss=0.024687965400516987\n", - "Surface training t=14126, loss=0.031081688590347767\n", - "Surface training t=14127, loss=0.03283830173313618\n", - "Surface training t=14128, loss=0.026328327134251595\n", - "Surface training t=14129, loss=0.03194093145430088\n", - "Surface training t=14130, loss=0.034837573766708374\n", - "Surface training t=14131, loss=0.03714960813522339\n", - "Surface training t=14132, loss=0.04256419837474823\n", - "Surface training t=14133, loss=0.03767057694494724\n", - "Surface training t=14134, loss=0.038227710872888565\n", - "Surface training t=14135, loss=0.036960795521736145\n", - "Surface training t=14136, loss=0.04383591562509537\n", - "Surface training t=14137, 
loss=0.03142302017658949\n", - "Surface training t=14138, loss=0.028691603802144527\n", - "Surface training t=14139, loss=0.03722143918275833\n", - "Surface training t=14140, loss=0.03470178321003914\n", - "Surface training t=14141, loss=0.028161906637251377\n", - "Surface training t=14142, loss=0.038097078911960125\n", - "Surface training t=14143, loss=0.03717733174562454\n", - "Surface training t=14144, loss=0.027453850023448467\n", - "Surface training t=14145, loss=0.028696014545857906\n", - "Surface training t=14146, loss=0.02999188844114542\n", - "Surface training t=14147, loss=0.031998785212635994\n", - "Surface training t=14148, loss=0.04727570526301861\n", - "Surface training t=14149, loss=0.054830703884363174\n", - "Surface training t=14150, loss=0.038632805459201336\n", - "Surface training t=14151, loss=0.03854517079889774\n", - "Surface training t=14152, loss=0.03124251402914524\n", - "Surface training t=14153, loss=0.03064655140042305\n", - "Surface training t=14154, loss=0.044662075117230415\n", - "Surface training t=14155, loss=0.03070718329399824\n", - "Surface training t=14156, loss=0.03526462335139513\n", - "Surface training t=14157, loss=0.04347432404756546\n", - "Surface training t=14158, loss=0.026141086593270302\n", - "Surface training t=14159, loss=0.023531770333647728\n", - "Surface training t=14160, loss=0.035041820257902145\n", - "Surface training t=14161, loss=0.027734420262277126\n", - "Surface training t=14162, loss=0.03481232561171055\n", - "Surface training t=14163, loss=0.03762086667120457\n", - "Surface training t=14164, loss=0.03508457541465759\n", - "Surface training t=14165, loss=0.03630257956683636\n", - "Surface training t=14166, loss=0.02933566737920046\n", - "Surface training t=14167, loss=0.03155849315226078\n", - "Surface training t=14168, loss=0.03594425693154335\n", - "Surface training t=14169, loss=0.029411688446998596\n", - "Surface training t=14170, loss=0.02549551986157894\n", - "Surface training t=14171, loss=0.017479277215898037\n", - "Surface training t=14172, loss=0.023816023021936417\n", - "Surface training t=14173, loss=0.024538605473935604\n", - "Surface training t=14174, loss=0.020739826373755932\n", - "Surface training t=14175, loss=0.023124187719076872\n", - "Surface training t=14176, loss=0.017545084469020367\n", - "Surface training t=14177, loss=0.024723154492676258\n", - "Surface training t=14178, loss=0.026234318502247334\n", - "Surface training t=14179, loss=0.028748388402163982\n", - "Surface training t=14180, loss=0.04089026898145676\n", - "Surface training t=14181, loss=0.03432234562933445\n", - "Surface training t=14182, loss=0.043195560574531555\n", - "Surface training t=14183, loss=0.02980712614953518\n", - "Surface training t=14184, loss=0.03459140658378601\n", - "Surface training t=14185, loss=0.0327260997146368\n", - "Surface training t=14186, loss=0.023164255544543266\n", - "Surface training t=14187, loss=0.03142717853188515\n", - "Surface training t=14188, loss=0.035093722864985466\n", - "Surface training t=14189, loss=0.028523748740553856\n", - "Surface training t=14190, loss=0.03826352767646313\n", - "Surface training t=14191, loss=0.038318103179335594\n", - "Surface training t=14192, loss=0.054002705961465836\n", - "Surface training t=14193, loss=0.04396700672805309\n", - "Surface training t=14194, loss=0.04989572614431381\n", - "Surface training t=14195, loss=0.03941944241523743\n", - "Surface training t=14196, loss=0.06973053514957428\n", - "Surface training t=14197, loss=0.050623999908566475\n", - "Surface 
training t=14198, loss=0.0527593158185482\n", - "Surface training t=14199, loss=0.05693576671183109\n", - "Surface training t=14200, loss=0.051904693245887756\n", - "Surface training t=14201, loss=0.04269547201693058\n", - "Surface training t=14202, loss=0.05762689560651779\n", - "Surface training t=14203, loss=0.04182312451303005\n", - "Surface training t=14204, loss=0.0384923592209816\n", - "Surface training t=14205, loss=0.05105765536427498\n", - "Surface training t=14206, loss=0.03712433576583862\n", - "Surface training t=14207, loss=0.0348972212523222\n", - "Surface training t=14208, loss=0.03300912119448185\n", - "Surface training t=14209, loss=0.02276457380503416\n", - "Surface training t=14210, loss=0.023698249831795692\n", - "Surface training t=14211, loss=0.026478917337954044\n", - "Surface training t=14212, loss=0.022409302182495594\n", - "Surface training t=14213, loss=0.02075917460024357\n", - "Surface training t=14214, loss=0.022379054687917233\n", - "Surface training t=14215, loss=0.025274978019297123\n", - "Surface training t=14216, loss=0.025970667600631714\n", - "Surface training t=14217, loss=0.026712768711149693\n", - "Surface training t=14218, loss=0.028509952127933502\n", - "Surface training t=14219, loss=0.021827984135597944\n", - "Surface training t=14220, loss=0.028964375145733356\n", - "Surface training t=14221, loss=0.027979865670204163\n", - "Surface training t=14222, loss=0.021672634407877922\n", - "Surface training t=14223, loss=0.03766068257391453\n", - "Surface training t=14224, loss=0.037372919730842113\n", - "Surface training t=14225, loss=0.035315765999257565\n", - "Surface training t=14226, loss=0.03523034788668156\n", - "Surface training t=14227, loss=0.031051822006702423\n", - "Surface training t=14228, loss=0.060096338391304016\n", - "Surface training t=14229, loss=0.04522351175546646\n", - "Surface training t=14230, loss=0.051581814885139465\n", - "Surface training t=14231, loss=0.032771484926342964\n", - "Surface training t=14232, loss=0.06588295474648476\n", - "Surface training t=14233, loss=0.05577551946043968\n", - "Surface training t=14234, loss=0.045776788145303726\n", - "Surface training t=14235, loss=0.04865499585866928\n", - "Surface training t=14236, loss=0.06908482313156128\n", - "Surface training t=14237, loss=0.04961125925183296\n", - "Surface training t=14238, loss=0.06640628352761269\n", - "Surface training t=14239, loss=0.06372785940766335\n", - "Surface training t=14240, loss=0.05485174059867859\n", - "Surface training t=14241, loss=0.08077820017933846\n", - "Surface training t=14242, loss=0.043106790632009506\n", - "Surface training t=14243, loss=0.05267234891653061\n", - "Surface training t=14244, loss=0.03193186782300472\n", - "Surface training t=14245, loss=0.04920491203665733\n", - "Surface training t=14246, loss=0.035450092516839504\n", - "Surface training t=14247, loss=0.049904268234968185\n", - "Surface training t=14248, loss=0.03862507734447718\n", - "Surface training t=14249, loss=0.044321225956082344\n", - "Surface training t=14250, loss=0.03357073664665222\n", - "Surface training t=14251, loss=0.03947561141103506\n", - "Surface training t=14252, loss=0.0498114675283432\n", - "Surface training t=14253, loss=0.04274086467921734\n", - "Surface training t=14254, loss=0.036288049072027206\n", - "Surface training t=14255, loss=0.035929882898926735\n", - "Surface training t=14256, loss=0.034542473033070564\n", - "Surface training t=14257, loss=0.03031568042933941\n", - "Surface training t=14258, loss=0.0332356933504343\n", - 
"Surface training t=14259, loss=0.030625914223492146\n", - "Surface training t=14260, loss=0.038620345294475555\n", - "Surface training t=14261, loss=0.03912381827831268\n", - "Surface training t=14262, loss=0.03571716882288456\n", - "Surface training t=14263, loss=0.033486814238131046\n", - "Surface training t=14264, loss=0.030178561806678772\n", - "Surface training t=14265, loss=0.0367218554019928\n", - "Surface training t=14266, loss=0.03466452471911907\n", - "Surface training t=14267, loss=0.04012872278690338\n", - "Surface training t=14268, loss=0.03203495591878891\n", - "Surface training t=14269, loss=0.025652294978499413\n", - "Surface training t=14270, loss=0.03086458519101143\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=14271, loss=0.023012550547719002\n", - "Surface training t=14272, loss=0.02589265536516905\n", - "Surface training t=14273, loss=0.0284084165468812\n", - "Surface training t=14274, loss=0.033787138760089874\n", - "Surface training t=14275, loss=0.023780410178005695\n", - "Surface training t=14276, loss=0.026029150001704693\n", - "Surface training t=14277, loss=0.024253438226878643\n", - "Surface training t=14278, loss=0.029156140983104706\n", - "Surface training t=14279, loss=0.04621439427137375\n", - "Surface training t=14280, loss=0.06488944590091705\n", - "Surface training t=14281, loss=0.05181817524135113\n", - "Surface training t=14282, loss=0.0640740804374218\n", - "Surface training t=14283, loss=0.061257705092430115\n", - "Surface training t=14284, loss=0.049651408568024635\n", - "Surface training t=14285, loss=0.05148984678089619\n", - "Surface training t=14286, loss=0.10417207330465317\n", - "Surface training t=14287, loss=0.060180800035595894\n", - "Surface training t=14288, loss=0.06328889541327953\n", - "Surface training t=14289, loss=0.06253402680158615\n", - "Surface training t=14290, loss=0.044863779097795486\n", - "Surface training t=14291, loss=0.050561362877488136\n", - "Surface training t=14292, loss=0.0449625626206398\n", - "Surface training t=14293, loss=0.04091520234942436\n", - "Surface training t=14294, loss=0.03973735682666302\n", - "Surface training t=14295, loss=0.03636149875819683\n", - "Surface training t=14296, loss=0.04566664807498455\n", - "Surface training t=14297, loss=0.040746187791228294\n", - "Surface training t=14298, loss=0.038649410009384155\n", - "Surface training t=14299, loss=0.03806559182703495\n", - "Surface training t=14300, loss=0.028164786286652088\n", - "Surface training t=14301, loss=0.03171217814087868\n", - "Surface training t=14302, loss=0.034757078625261784\n", - "Surface training t=14303, loss=0.03087129071354866\n", - "Surface training t=14304, loss=0.030222147703170776\n", - "Surface training t=14305, loss=0.03541441913694143\n", - "Surface training t=14306, loss=0.042020680382847786\n", - "Surface training t=14307, loss=0.024460253305733204\n", - "Surface training t=14308, loss=0.038594139739871025\n", - "Surface training t=14309, loss=0.039691269397735596\n", - "Surface training t=14310, loss=0.06284867599606514\n", - "Surface training t=14311, loss=0.030371051281690598\n", - "Surface training t=14312, loss=0.02372363954782486\n", - "Surface training t=14313, loss=0.02937956526875496\n", - "Surface training t=14314, loss=0.043087903410196304\n", - "Surface training t=14315, loss=0.034419638104736805\n", - "Surface training t=14316, loss=0.030744980089366436\n", - "Surface training t=14317, loss=0.026823433116078377\n", - "Surface training t=14318, 
loss=0.027856333181262016\n", - "Surface training t=14319, loss=0.024686760269105434\n", - "Surface training t=14320, loss=0.01998529490083456\n", - "Surface training t=14321, loss=0.01764159183949232\n", - "Surface training t=14322, loss=0.028927762061357498\n", - "Surface training t=14323, loss=0.02590646781027317\n", - "Surface training t=14324, loss=0.029021148569881916\n", - "Surface training t=14325, loss=0.035558778792619705\n", - "Surface training t=14326, loss=0.027370997704565525\n", - "Surface training t=14327, loss=0.026001199148595333\n", - "Surface training t=14328, loss=0.035801636055111885\n", - "Surface training t=14329, loss=0.030812932178378105\n", - "Surface training t=14330, loss=0.035912283696234226\n", - "Surface training t=14331, loss=0.03651457838714123\n", - "Surface training t=14332, loss=0.03338473662734032\n", - "Surface training t=14333, loss=0.0308114280924201\n", - "Surface training t=14334, loss=0.034058813005685806\n", - "Surface training t=14335, loss=0.039456648752093315\n", - "Surface training t=14336, loss=0.0389491468667984\n", - "Surface training t=14337, loss=0.034711653366684914\n", - "Surface training t=14338, loss=0.03779999166727066\n", - "Surface training t=14339, loss=0.030622989870607853\n", - "Surface training t=14340, loss=0.03025469183921814\n", - "Surface training t=14341, loss=0.02924729697406292\n", - "Surface training t=14342, loss=0.040672944858670235\n", - "Surface training t=14343, loss=0.043746042996644974\n", - "Surface training t=14344, loss=0.028860379941761494\n", - "Surface training t=14345, loss=0.03333492670208216\n", - "Surface training t=14346, loss=0.04075673781335354\n", - "Surface training t=14347, loss=0.030497372150421143\n", - "Surface training t=14348, loss=0.0413789302110672\n", - "Surface training t=14349, loss=0.03955327905714512\n", - "Surface training t=14350, loss=0.03242386877536774\n", - "Surface training t=14351, loss=0.04013572260737419\n", - "Surface training t=14352, loss=0.02504448313266039\n", - "Surface training t=14353, loss=0.03345884941518307\n", - "Surface training t=14354, loss=0.03811902552843094\n", - "Surface training t=14355, loss=0.026274679228663445\n", - "Surface training t=14356, loss=0.0219234861433506\n", - "Surface training t=14357, loss=0.044810738414525986\n", - "Surface training t=14358, loss=0.026311487890779972\n", - "Surface training t=14359, loss=0.03590661846101284\n", - "Surface training t=14360, loss=0.06125040724873543\n", - "Surface training t=14361, loss=0.04784688726067543\n", - "Surface training t=14362, loss=0.04271807707846165\n", - "Surface training t=14363, loss=0.050846802070736885\n", - "Surface training t=14364, loss=0.059207817539572716\n", - "Surface training t=14365, loss=0.044809965416789055\n", - "Surface training t=14366, loss=0.04974792338907719\n", - "Surface training t=14367, loss=0.06716062128543854\n", - "Surface training t=14368, loss=0.04584789276123047\n", - "Surface training t=14369, loss=0.047899773344397545\n", - "Surface training t=14370, loss=0.03336813859641552\n", - "Surface training t=14371, loss=0.03372153080999851\n", - "Surface training t=14372, loss=0.029622619040310383\n", - "Surface training t=14373, loss=0.024487094953656197\n", - "Surface training t=14374, loss=0.0371838454157114\n", - "Surface training t=14375, loss=0.03318409714847803\n", - "Surface training t=14376, loss=0.033566540107131004\n", - "Surface training t=14377, loss=0.033369798213243484\n", - "Surface training t=14378, loss=0.03026823326945305\n", - "Surface training 
t=14379, loss=0.025199757888913155\n", - "Surface training t=14380, loss=0.025297102518379688\n", - "Surface training t=14381, loss=0.02235732041299343\n", - "Surface training t=14382, loss=0.02896038256585598\n", - "Surface training t=14383, loss=0.03466391749680042\n", - "Surface training t=14384, loss=0.030549753457307816\n", - "Surface training t=14385, loss=0.03047199919819832\n", - "Surface training t=14386, loss=0.02987744379788637\n", - "Surface training t=14387, loss=0.03154538944363594\n", - "Surface training t=14388, loss=0.036453818902373314\n", - "Surface training t=14389, loss=0.034520833753049374\n", - "Surface training t=14390, loss=0.03267333097755909\n", - "Surface training t=14391, loss=0.03816030826419592\n", - "Surface training t=14392, loss=0.032291047275066376\n", - "Surface training t=14393, loss=0.029100869782269\n", - "Surface training t=14394, loss=0.026438836939632893\n", - "Surface training t=14395, loss=0.0264720031991601\n", - "Surface training t=14396, loss=0.0201083580031991\n", - "Surface training t=14397, loss=0.028278682380914688\n", - "Surface training t=14398, loss=0.03425589669495821\n", - "Surface training t=14399, loss=0.046913446858525276\n", - "Surface training t=14400, loss=0.04258785769343376\n", - "Surface training t=14401, loss=0.04412650689482689\n", - "Surface training t=14402, loss=0.037135498598217964\n", - "Surface training t=14403, loss=0.034913625568151474\n", - "Surface training t=14404, loss=0.04253789409995079\n", - "Surface training t=14405, loss=0.045373786240816116\n", - "Surface training t=14406, loss=0.05486079305410385\n", - "Surface training t=14407, loss=0.06607267260551453\n", - "Surface training t=14408, loss=0.04717965982854366\n", - "Surface training t=14409, loss=0.061130622401833534\n", - "Surface training t=14410, loss=0.030773463658988476\n", - "Surface training t=14411, loss=0.038778072223067284\n", - "Surface training t=14412, loss=0.0362280635163188\n", - "Surface training t=14413, loss=0.02607795875519514\n", - "Surface training t=14414, loss=0.023434409871697426\n", - "Surface training t=14415, loss=0.02959115244448185\n", - "Surface training t=14416, loss=0.03362779226154089\n", - "Surface training t=14417, loss=0.03334695287048817\n", - "Surface training t=14418, loss=0.02827569004148245\n", - "Surface training t=14419, loss=0.03607737924903631\n", - "Surface training t=14420, loss=0.036455441266298294\n", - "Surface training t=14421, loss=0.044826941564679146\n", - "Surface training t=14422, loss=0.030452998355031013\n", - "Surface training t=14423, loss=0.023294128477573395\n", - "Surface training t=14424, loss=0.03706016764044762\n", - "Surface training t=14425, loss=0.04092387109994888\n", - "Surface training t=14426, loss=0.03545206133276224\n", - "Surface training t=14427, loss=0.03201553598046303\n", - "Surface training t=14428, loss=0.028004865162074566\n", - "Surface training t=14429, loss=0.030954129993915558\n", - "Surface training t=14430, loss=0.034384939819574356\n", - "Surface training t=14431, loss=0.037647122517228127\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=14432, loss=0.034594496712088585\n", - "Surface training t=14433, loss=0.027344845235347748\n", - "Surface training t=14434, loss=0.02690762933343649\n", - "Surface training t=14435, loss=0.02673687320202589\n", - "Surface training t=14436, loss=0.03103051520884037\n", - "Surface training t=14437, loss=0.022277021780610085\n", - "Surface training t=14438, loss=0.022525496315211058\n", 
- "Surface training t=14439, loss=0.016671743243932724\n", - "Surface training t=14440, loss=0.02257512602955103\n", - "Surface training t=14441, loss=0.021442673169076443\n", - "Surface training t=14442, loss=0.025309008546173573\n", - "Surface training t=14443, loss=0.02005670592188835\n", - "Surface training t=14444, loss=0.018066341057419777\n", - "Surface training t=14445, loss=0.025242699310183525\n", - "Surface training t=14446, loss=0.025137332268059254\n", - "Surface training t=14447, loss=0.02511262334883213\n", - "Surface training t=14448, loss=0.027627043426036835\n", - "Surface training t=14449, loss=0.03321895096451044\n", - "Surface training t=14450, loss=0.034608226269483566\n", - "Surface training t=14451, loss=0.036935484036803246\n", - "Surface training t=14452, loss=0.038375288248062134\n", - "Surface training t=14453, loss=0.02933731535449624\n", - "Surface training t=14454, loss=0.03573846071958542\n", - "Surface training t=14455, loss=0.04110333137214184\n", - "Surface training t=14456, loss=0.045455725863575935\n", - "Surface training t=14457, loss=0.039492092095315456\n", - "Surface training t=14458, loss=0.03648689389228821\n", - "Surface training t=14459, loss=0.03600358963012695\n", - "Surface training t=14460, loss=0.026458842679858208\n", - "Surface training t=14461, loss=0.0503835529088974\n", - "Surface training t=14462, loss=0.041917646303772926\n", - "Surface training t=14463, loss=0.03947584703564644\n", - "Surface training t=14464, loss=0.038445133715867996\n", - "Surface training t=14465, loss=0.040258483961224556\n", - "Surface training t=14466, loss=0.06593122705817223\n", - "Surface training t=14467, loss=0.04169527254998684\n", - "Surface training t=14468, loss=0.046218391507864\n", - "Surface training t=14469, loss=0.10070542991161346\n", - "Surface training t=14470, loss=0.05783892795443535\n", - "Surface training t=14471, loss=0.08702217787504196\n", - "Surface training t=14472, loss=0.05617010034620762\n", - "Surface training t=14473, loss=0.05276033841073513\n", - "Surface training t=14474, loss=0.0568408016115427\n", - "Surface training t=14475, loss=0.03922050725668669\n", - "Surface training t=14476, loss=0.03959671873599291\n", - "Surface training t=14477, loss=0.05064762756228447\n", - "Surface training t=14478, loss=0.047700922936201096\n", - "Surface training t=14479, loss=0.03452697861939669\n", - "Surface training t=14480, loss=0.03522396273910999\n", - "Surface training t=14481, loss=0.0317825973033905\n", - "Surface training t=14482, loss=0.02572279144078493\n", - "Surface training t=14483, loss=0.02959488518536091\n", - "Surface training t=14484, loss=0.0266581978648901\n", - "Surface training t=14485, loss=0.03739857114851475\n", - "Surface training t=14486, loss=0.02934996411204338\n", - "Surface training t=14487, loss=0.04430007189512253\n", - "Surface training t=14488, loss=0.04168540798127651\n", - "Surface training t=14489, loss=0.03549339063465595\n", - "Surface training t=14490, loss=0.026673873886466026\n", - "Surface training t=14491, loss=0.026365550234913826\n", - "Surface training t=14492, loss=0.01993488520383835\n", - "Surface training t=14493, loss=0.026733380742371082\n", - "Surface training t=14494, loss=0.02118724025785923\n", - "Surface training t=14495, loss=0.024947425350546837\n", - "Surface training t=14496, loss=0.01885572774335742\n", - "Surface training t=14497, loss=0.024971971288323402\n", - "Surface training t=14498, loss=0.021464979276061058\n", - "Surface training t=14499, 
loss=0.021834378130733967\n", - "Surface training t=14500, loss=0.021400523371994495\n", - "Surface training t=14501, loss=0.018310116603970528\n", - "Surface training t=14502, loss=0.02797598112374544\n", - "Surface training t=14503, loss=0.026091418229043484\n", - "Surface training t=14504, loss=0.02086326666176319\n", - "Surface training t=14505, loss=0.022968475706875324\n", - "Surface training t=14506, loss=0.02301607746630907\n", - "Surface training t=14507, loss=0.021863451227545738\n", - "Surface training t=14508, loss=0.027314461767673492\n", - "Surface training t=14509, loss=0.026953990571200848\n", - "Surface training t=14510, loss=0.03244287893176079\n", - "Surface training t=14511, loss=0.0260021286085248\n", - "Surface training t=14512, loss=0.027927525341510773\n", - "Surface training t=14513, loss=0.027057438157498837\n", - "Surface training t=14514, loss=0.031065702438354492\n", - "Surface training t=14515, loss=0.03986556176096201\n", - "Surface training t=14516, loss=0.044974185526371\n", - "Surface training t=14517, loss=0.040236372500658035\n", - "Surface training t=14518, loss=0.031912392005324364\n", - "Surface training t=14519, loss=0.02919139340519905\n", - "Surface training t=14520, loss=0.026713427156209946\n", - "Surface training t=14521, loss=0.02822030708193779\n", - "Surface training t=14522, loss=0.03529102634638548\n", - "Surface training t=14523, loss=0.02590423170477152\n", - "Surface training t=14524, loss=0.020462526008486748\n", - "Surface training t=14525, loss=0.028597218915820122\n", - "Surface training t=14526, loss=0.030006535351276398\n", - "Surface training t=14527, loss=0.029571411199867725\n", - "Surface training t=14528, loss=0.0397662203758955\n", - "Surface training t=14529, loss=0.04621439427137375\n", - "Surface training t=14530, loss=0.056739725172519684\n", - "Surface training t=14531, loss=0.03623206913471222\n", - "Surface training t=14532, loss=0.03946557641029358\n", - "Surface training t=14533, loss=0.04382668621838093\n", - "Surface training t=14534, loss=0.038754912093281746\n", - "Surface training t=14535, loss=0.04097096063196659\n", - "Surface training t=14536, loss=0.04314905405044556\n", - "Surface training t=14537, loss=0.04759722016751766\n", - "Surface training t=14538, loss=0.0611510556191206\n", - "Surface training t=14539, loss=0.06025143153965473\n", - "Surface training t=14540, loss=0.04737558215856552\n", - "Surface training t=14541, loss=0.04949330352246761\n", - "Surface training t=14542, loss=0.0855475664138794\n", - "Surface training t=14543, loss=0.056252263486385345\n", - "Surface training t=14544, loss=0.07185852155089378\n", - "Surface training t=14545, loss=0.0644020140171051\n", - "Surface training t=14546, loss=0.04862675070762634\n", - "Surface training t=14547, loss=0.0702507458627224\n", - "Surface training t=14548, loss=0.04406085889786482\n", - "Surface training t=14549, loss=0.04176292475312948\n", - "Surface training t=14550, loss=0.048207519575953484\n", - "Surface training t=14551, loss=0.026883866637945175\n", - "Surface training t=14552, loss=0.03271685168147087\n", - "Surface training t=14553, loss=0.03039118554443121\n", - "Surface training t=14554, loss=0.020520287565886974\n", - "Surface training t=14555, loss=0.025708685629069805\n", - "Surface training t=14556, loss=0.03413883689790964\n", - "Surface training t=14557, loss=0.027001998387277126\n", - "Surface training t=14558, loss=0.023057020734995604\n", - "Surface training t=14559, loss=0.05650327354669571\n", - "Surface training 
t=14560, loss=0.03901625890284777\n", - "Surface training t=14561, loss=0.04889889806509018\n", - "Surface training t=14562, loss=0.038545302115380764\n", - "Surface training t=14563, loss=0.07054547965526581\n", - "Surface training t=14564, loss=0.04422052949666977\n", - "Surface training t=14565, loss=0.043371979147195816\n", - "Surface training t=14566, loss=0.03209813218563795\n", - "Surface training t=14567, loss=0.01973497960716486\n", - "Surface training t=14568, loss=0.03646285645663738\n", - "Surface training t=14569, loss=0.026312327943742275\n", - "Surface training t=14570, loss=0.031505304388701916\n", - "Surface training t=14571, loss=0.03674857318401337\n", - "Surface training t=14572, loss=0.026914574205875397\n", - "Surface training t=14573, loss=0.026312263682484627\n", - "Surface training t=14574, loss=0.03876025602221489\n", - "Surface training t=14575, loss=0.03402486443519592\n", - "Surface training t=14576, loss=0.03505958616733551\n", - "Surface training t=14577, loss=0.030448351986706257\n", - "Surface training t=14578, loss=0.02798634674400091\n", - "Surface training t=14579, loss=0.03280625492334366\n", - "Surface training t=14580, loss=0.0363532193005085\n", - "Surface training t=14581, loss=0.031180025078356266\n", - "Surface training t=14582, loss=0.03216549567878246\n", - "Surface training t=14583, loss=0.028763309121131897\n", - "Surface training t=14584, loss=0.030585838481783867\n", - "Surface training t=14585, loss=0.0346829928457737\n", - "Surface training t=14586, loss=0.03128585126250982\n", - "Surface training t=14587, loss=0.04311365634202957\n", - "Surface training t=14588, loss=0.04062799643725157\n", - "Surface training t=14589, loss=0.04325109161436558\n", - "Surface training t=14590, loss=0.043051473796367645\n", - "Surface training t=14591, loss=0.044292956590652466\n", - "Surface training t=14592, loss=0.04264703020453453\n", - "Surface training t=14593, loss=0.03858240507543087\n", - "Surface training t=14594, loss=0.03576001152396202\n", - "Surface training t=14595, loss=0.05923020839691162\n", - "Surface training t=14596, loss=0.03706434741616249\n", - "Surface training t=14597, loss=0.041552841663360596\n", - "Surface training t=14598, loss=0.0428941547870636\n", - "Surface training t=14599, loss=0.03254920057952404\n", - "Surface training t=14600, loss=0.040896205231547356\n", - "Surface training t=14601, loss=0.03387158829718828\n", - "Surface training t=14602, loss=0.03261195681989193\n", - "Surface training t=14603, loss=0.03216651640832424\n", - "Surface training t=14604, loss=0.03343428857624531\n", - "Surface training t=14605, loss=0.04082923009991646\n", - "Surface training t=14606, loss=0.039016611874103546\n", - "Surface training t=14607, loss=0.03423462063074112\n", - "Surface training t=14608, loss=0.022729715332388878\n", - "Surface training t=14609, loss=0.029512574896216393\n", - "Surface training t=14610, loss=0.02383282408118248\n", - "Surface training t=14611, loss=0.025753258727490902\n", - "Surface training t=14612, loss=0.022100877948105335\n", - "Surface training t=14613, loss=0.020688053220510483\n", - "Surface training t=14614, loss=0.031163454055786133\n", - "Surface training t=14615, loss=0.043468913063406944\n", - "Surface training t=14616, loss=0.03976544924080372\n", - "Surface training t=14617, loss=0.03969067335128784\n", - "Surface training t=14618, loss=0.038922784850001335\n", - "Surface training t=14619, loss=0.03531617857515812\n", - "Surface training t=14620, loss=0.039198155514895916\n", - "Surface 
training t=14621, loss=0.04432293772697449\n", - "Surface training t=14622, loss=0.046233417466282845\n", - "Surface training t=14623, loss=0.03115019015967846\n", - "Surface training t=14624, loss=0.038441259413957596\n", - "Surface training t=14625, loss=0.030296615324914455\n", - "Surface training t=14626, loss=0.02605722937732935\n", - "Surface training t=14627, loss=0.029321451671421528\n", - "Surface training t=14628, loss=0.0388207733631134\n", - "Surface training t=14629, loss=0.03453186899423599\n", - "Surface training t=14630, loss=0.02530717197805643\n", - "Surface training t=14631, loss=0.02877061255276203\n", - "Surface training t=14632, loss=0.027729149907827377\n", - "Surface training t=14633, loss=0.025210097432136536\n", - "Surface training t=14634, loss=0.035908980295062065\n", - "Surface training t=14635, loss=0.03154575917869806\n", - "Surface training t=14636, loss=0.044033026322722435\n", - "Surface training t=14637, loss=0.04752436280250549\n", - "Surface training t=14638, loss=0.029444048181176186\n", - "Surface training t=14639, loss=0.033436316065490246\n", - "Surface training t=14640, loss=0.029069450683891773\n", - "Surface training t=14641, loss=0.02082943171262741\n", - "Surface training t=14642, loss=0.022585125640034676\n", - "Surface training t=14643, loss=0.024985208176076412\n", - "Surface training t=14644, loss=0.02236458659172058\n", - "Surface training t=14645, loss=0.021095208823680878\n", - "Surface training t=14646, loss=0.023037821054458618\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=14647, loss=0.030790502205491066\n", - "Surface training t=14648, loss=0.030235628597438335\n", - "Surface training t=14649, loss=0.035025062039494514\n", - "Surface training t=14650, loss=0.04224836453795433\n", - "Surface training t=14651, loss=0.037880903109908104\n", - "Surface training t=14652, loss=0.03134852182120085\n", - "Surface training t=14653, loss=0.03216871619224548\n", - "Surface training t=14654, loss=0.0355074517428875\n", - "Surface training t=14655, loss=0.05457836017012596\n", - "Surface training t=14656, loss=0.06318793073296547\n", - "Surface training t=14657, loss=0.049047477543354034\n", - "Surface training t=14658, loss=0.07264153100550175\n", - "Surface training t=14659, loss=0.043684015050530434\n", - "Surface training t=14660, loss=0.03937288839370012\n", - "Surface training t=14661, loss=0.03968206886202097\n", - "Surface training t=14662, loss=0.027336928993463516\n", - "Surface training t=14663, loss=0.026614677160978317\n", - "Surface training t=14664, loss=0.02583096269518137\n", - "Surface training t=14665, loss=0.0270165977999568\n", - "Surface training t=14666, loss=0.02949741017073393\n", - "Surface training t=14667, loss=0.03295387513935566\n", - "Surface training t=14668, loss=0.03174677211791277\n", - "Surface training t=14669, loss=0.024831589311361313\n", - "Surface training t=14670, loss=0.030772159807384014\n", - "Surface training t=14671, loss=0.030150648206472397\n", - "Surface training t=14672, loss=0.03017403930425644\n", - "Surface training t=14673, loss=0.022388567216694355\n", - "Surface training t=14674, loss=0.02405296266078949\n", - "Surface training t=14675, loss=0.02085775788873434\n", - "Surface training t=14676, loss=0.029062965884804726\n", - "Surface training t=14677, loss=0.03765677474439144\n", - "Surface training t=14678, loss=0.03009692393243313\n", - "Surface training t=14679, loss=0.047008002176880836\n", - "Surface training t=14680, 
- [... notebook stdout elided: ~1,260 repeated per-iteration log lines of the form "Surface training t=N, loss=X" for t=14681 through t=15945 (losses roughly in the 0.014–0.084 range), removed from the notebook cell output by this diff ...]
"Surface training t=15946, loss=0.02914919052273035\n", - "Surface training t=15947, loss=0.045731544494628906\n", - "Surface training t=15948, loss=0.040084488689899445\n", - "Surface training t=15949, loss=0.044205646961927414\n", - "Surface training t=15950, loss=0.03402782045304775\n", - "Surface training t=15951, loss=0.0363750234246254\n", - "Surface training t=15952, loss=0.022162682376801968\n", - "Surface training t=15953, loss=0.022677174769341946\n", - "Surface training t=15954, loss=0.021678253076970577\n", - "Surface training t=15955, loss=0.02234515082091093\n", - "Surface training t=15956, loss=0.02086067944765091\n", - "Surface training t=15957, loss=0.016339070163667202\n", - "Surface training t=15958, loss=0.02245946042239666\n", - "Surface training t=15959, loss=0.01888938806951046\n", - "Surface training t=15960, loss=0.020546289160847664\n", - "Surface training t=15961, loss=0.02753981575369835\n", - "Surface training t=15962, loss=0.022168569266796112\n", - "Surface training t=15963, loss=0.023563960567116737\n", - "Surface training t=15964, loss=0.026290981099009514\n", - "Surface training t=15965, loss=0.03692127205431461\n", - "Surface training t=15966, loss=0.02385303657501936\n", - "Surface training t=15967, loss=0.021740932017564774\n", - "Surface training t=15968, loss=0.02365689631551504\n", - "Surface training t=15969, loss=0.024819666519761086\n", - "Surface training t=15970, loss=0.02175415214151144\n", - "Surface training t=15971, loss=0.022811430506408215\n", - "Surface training t=15972, loss=0.03585507534444332\n", - "Surface training t=15973, loss=0.034321089275181293\n", - "Surface training t=15974, loss=0.04013654217123985\n", - "Surface training t=15975, loss=0.02503922674804926\n", - "Surface training t=15976, loss=0.029342525638639927\n", - "Surface training t=15977, loss=0.02069673128426075\n", - "Surface training t=15978, loss=0.028819488361477852\n", - "Surface training t=15979, loss=0.033446721732616425\n", - "Surface training t=15980, loss=0.048242220655083656\n", - "Surface training t=15981, loss=0.04875280614942312\n", - "Surface training t=15982, loss=0.05031518079340458\n", - "Surface training t=15983, loss=0.07290169037878513\n", - "Surface training t=15984, loss=0.05632774718105793\n", - "Surface training t=15985, loss=0.06741839833557606\n", - "Surface training t=15986, loss=0.07462773658335209\n", - "Surface training t=15987, loss=0.04808584600687027\n", - "Surface training t=15988, loss=0.05640951730310917\n", - "Surface training t=15989, loss=0.06306791678071022\n", - "Surface training t=15990, loss=0.04438118264079094\n", - "Surface training t=15991, loss=0.05683475732803345\n", - "Surface training t=15992, loss=0.0610501766204834\n", - "Surface training t=15993, loss=0.04991211835294962\n", - "Surface training t=15994, loss=0.08789429068565369\n", - "Surface training t=15995, loss=0.04977790825068951\n", - "Surface training t=15996, loss=0.04216937534511089\n", - "Surface training t=15997, loss=0.04152443911880255\n", - "Surface training t=15998, loss=0.06258906237781048\n", - "Surface training t=15999, loss=0.04159589111804962\n", - "Surface training t=16000, loss=0.04414280317723751\n", - "Surface training t=16001, loss=0.061214471235871315\n", - "Surface training t=16002, loss=0.06564925611019135\n", - "Surface training t=16003, loss=0.06103505939245224\n", - "Surface training t=16004, loss=0.059368645772337914\n", - "Surface training t=16005, loss=0.03867531195282936\n", - "Surface training t=16006, loss=0.0452308040112257\n", 
- "Surface training t=16007, loss=0.031734032556414604\n", - "Surface training t=16008, loss=0.02699464652687311\n", - "Surface training t=16009, loss=0.0380979236215353\n", - "Surface training t=16010, loss=0.024552849121391773\n", - "Surface training t=16011, loss=0.029047736898064613\n", - "Surface training t=16012, loss=0.029621897265315056\n", - "Surface training t=16013, loss=0.02565467171370983\n", - "Surface training t=16014, loss=0.02868795581161976\n", - "Surface training t=16015, loss=0.02549120970070362\n", - "Surface training t=16016, loss=0.021681216545403004\n", - "Surface training t=16017, loss=0.023916435427963734\n", - "Surface training t=16018, loss=0.020113643258810043\n", - "Surface training t=16019, loss=0.029279636219143867\n", - "Surface training t=16020, loss=0.03378977719694376\n", - "Surface training t=16021, loss=0.031064974144101143\n", - "Surface training t=16022, loss=0.03484161105006933\n", - "Surface training t=16023, loss=0.036618880927562714\n", - "Surface training t=16024, loss=0.025171538814902306\n", - "Surface training t=16025, loss=0.023700186982750893\n", - "Surface training t=16026, loss=0.027556772343814373\n", - "Surface training t=16027, loss=0.028946226462721825\n", - "Surface training t=16028, loss=0.03169481083750725\n", - "Surface training t=16029, loss=0.025737255811691284\n", - "Surface training t=16030, loss=0.036737969145178795\n", - "Surface training t=16031, loss=0.02784332912415266\n", - "Surface training t=16032, loss=0.021402316633611917\n", - "Surface training t=16033, loss=0.02726352959871292\n", - "Surface training t=16034, loss=0.03264736197888851\n", - "Surface training t=16035, loss=0.021715521812438965\n", - "Surface training t=16036, loss=0.020827163942158222\n", - "Surface training t=16037, loss=0.031945718452334404\n", - "Surface training t=16038, loss=0.022448400035500526\n", - "Surface training t=16039, loss=0.02402605675160885\n", - "Surface training t=16040, loss=0.024053932167589664\n", - "Surface training t=16041, loss=0.02416621334850788\n", - "Surface training t=16042, loss=0.017988025210797787\n", - "Surface training t=16043, loss=0.032368121668696404\n", - "Surface training t=16044, loss=0.027846205979585648\n", - "Surface training t=16045, loss=0.031165478751063347\n", - "Surface training t=16046, loss=0.030591626651585102\n", - "Surface training t=16047, loss=0.03316420875489712\n", - "Surface training t=16048, loss=0.030993499793112278\n", - "Surface training t=16049, loss=0.04691632464528084\n", - "Surface training t=16050, loss=0.033746019937098026\n", - "Surface training t=16051, loss=0.03352724481374025\n", - "Surface training t=16052, loss=0.022794504649937153\n", - "Surface training t=16053, loss=0.02444312535226345\n", - "Surface training t=16054, loss=0.026347477920353413\n", - "Surface training t=16055, loss=0.037716373801231384\n", - "Surface training t=16056, loss=0.029644961468875408\n", - "Surface training t=16057, loss=0.03695983625948429\n", - "Surface training t=16058, loss=0.02509856317192316\n", - "Surface training t=16059, loss=0.020435066893696785\n", - "Surface training t=16060, loss=0.02069389447569847\n", - "Surface training t=16061, loss=0.03999643586575985\n", - "Surface training t=16062, loss=0.0337011544033885\n", - "Surface training t=16063, loss=0.029693705961108208\n", - "Surface training t=16064, loss=0.026087770238518715\n", - "Surface training t=16065, loss=0.025738087482750416\n", - "Surface training t=16066, loss=0.03819381445646286\n", - "Surface training t=16067, 
loss=0.03777011297643185\n", - "Surface training t=16068, loss=0.042517900466918945\n", - "Surface training t=16069, loss=0.03941089008003473\n", - "Surface training t=16070, loss=0.038866691291332245\n", - "Surface training t=16071, loss=0.02927551604807377\n", - "Surface training t=16072, loss=0.028436478227376938\n", - "Surface training t=16073, loss=0.03192823100835085\n", - "Surface training t=16074, loss=0.02396235056221485\n", - "Surface training t=16075, loss=0.02357561932876706\n", - "Surface training t=16076, loss=0.01679617539048195\n", - "Surface training t=16077, loss=0.017037534154951572\n", - "Surface training t=16078, loss=0.021189359948039055\n", - "Surface training t=16079, loss=0.01839878037571907\n", - "Surface training t=16080, loss=0.022084389813244343\n", - "Surface training t=16081, loss=0.02477658074349165\n", - "Surface training t=16082, loss=0.024190410040318966\n", - "Surface training t=16083, loss=0.028342743404209614\n", - "Surface training t=16084, loss=0.040196461603045464\n", - "Surface training t=16085, loss=0.03735689911991358\n", - "Surface training t=16086, loss=0.06726585328578949\n", - "Surface training t=16087, loss=0.04134915117174387\n", - "Surface training t=16088, loss=0.04060852527618408\n", - "Surface training t=16089, loss=0.032046690583229065\n", - "Surface training t=16090, loss=0.03726394288241863\n", - "Surface training t=16091, loss=0.03383242338895798\n", - "Surface training t=16092, loss=0.04272215627133846\n", - "Surface training t=16093, loss=0.036128233186900616\n", - "Surface training t=16094, loss=0.03991597332060337\n", - "Surface training t=16095, loss=0.04046559892594814\n", - "Surface training t=16096, loss=0.04301628842949867\n", - "Surface training t=16097, loss=0.05548678711056709\n", - "Surface training t=16098, loss=0.04411087557673454\n", - "Surface training t=16099, loss=0.04613960161805153\n", - "Surface training t=16100, loss=0.05242433212697506\n", - "Surface training t=16101, loss=0.05405939556658268\n", - "Surface training t=16102, loss=0.038655007258057594\n", - "Surface training t=16103, loss=0.04077538847923279\n", - "Surface training t=16104, loss=0.03560328669846058\n", - "Surface training t=16105, loss=0.02771997544914484\n", - "Surface training t=16106, loss=0.03337400034070015\n", - "Surface training t=16107, loss=0.028065742924809456\n", - "Surface training t=16108, loss=0.020280742086470127\n", - "Surface training t=16109, loss=0.022607408463954926\n", - "Surface training t=16110, loss=0.026034782640635967\n", - "Surface training t=16111, loss=0.022899307310581207\n", - "Surface training t=16112, loss=0.02386600151658058\n", - "Surface training t=16113, loss=0.02963197883218527\n", - "Surface training t=16114, loss=0.025495252572000027\n", - "Surface training t=16115, loss=0.04166559502482414\n", - "Surface training t=16116, loss=0.04950735718011856\n", - "Surface training t=16117, loss=0.03293870948255062\n", - "Surface training t=16118, loss=0.0359977874904871\n", - "Surface training t=16119, loss=0.0328375780954957\n", - "Surface training t=16120, loss=0.027188386768102646\n", - "Surface training t=16121, loss=0.03803757764399052\n", - "Surface training t=16122, loss=0.029870386235415936\n", - "Surface training t=16123, loss=0.024349058978259563\n", - "Surface training t=16124, loss=0.027843660674989223\n", - "Surface training t=16125, loss=0.0295416247099638\n", - "Surface training t=16126, loss=0.030907703563570976\n", - "Surface training t=16127, loss=0.02148399781435728\n", - "Surface training 
t=16128, loss=0.023663410916924477\n", - "Surface training t=16129, loss=0.029586516320705414\n", - "Surface training t=16130, loss=0.030400780960917473\n", - "Surface training t=16131, loss=0.025986580178141594\n", - "Surface training t=16132, loss=0.041925569996237755\n", - "Surface training t=16133, loss=0.02725112345069647\n", - "Surface training t=16134, loss=0.03369414061307907\n", - "Surface training t=16135, loss=0.0305866077542305\n", - "Surface training t=16136, loss=0.025991201400756836\n", - "Surface training t=16137, loss=0.024930794723331928\n", - "Surface training t=16138, loss=0.02915574051439762\n", - "Surface training t=16139, loss=0.026432378217577934\n", - "Surface training t=16140, loss=0.033471858128905296\n", - "Surface training t=16141, loss=0.03119182586669922\n", - "Surface training t=16142, loss=0.02982445526868105\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=16143, loss=0.024021940305829048\n", - "Surface training t=16144, loss=0.03326742071658373\n", - "Surface training t=16145, loss=0.029899579472839832\n", - "Surface training t=16146, loss=0.029256219044327736\n", - "Surface training t=16147, loss=0.02498300652951002\n", - "Surface training t=16148, loss=0.02536193560808897\n", - "Surface training t=16149, loss=0.025866812095046043\n", - "Surface training t=16150, loss=0.033450646325945854\n", - "Surface training t=16151, loss=0.03291573282331228\n", - "Surface training t=16152, loss=0.033883203752338886\n", - "Surface training t=16153, loss=0.030197005718946457\n", - "Surface training t=16154, loss=0.03303850628435612\n", - "Surface training t=16155, loss=0.038406072184443474\n", - "Surface training t=16156, loss=0.033296141773462296\n", - "Surface training t=16157, loss=0.04295185208320618\n", - "Surface training t=16158, loss=0.04157756827771664\n", - "Surface training t=16159, loss=0.034087625332176685\n", - "Surface training t=16160, loss=0.048594314604997635\n", - "Surface training t=16161, loss=0.037968989461660385\n", - "Surface training t=16162, loss=0.046196091920137405\n", - "Surface training t=16163, loss=0.031422064639627934\n", - "Surface training t=16164, loss=0.05147118307650089\n", - "Surface training t=16165, loss=0.0509542990475893\n", - "Surface training t=16166, loss=0.06941617280244827\n", - "Surface training t=16167, loss=0.04586916044354439\n", - "Surface training t=16168, loss=0.05685681663453579\n", - "Surface training t=16169, loss=0.04645639657974243\n", - "Surface training t=16170, loss=0.04948693513870239\n", - "Surface training t=16171, loss=0.03944068402051926\n", - "Surface training t=16172, loss=0.04915842041373253\n", - "Surface training t=16173, loss=0.043433777987957\n", - "Surface training t=16174, loss=0.037024788558483124\n", - "Surface training t=16175, loss=0.025087697431445122\n", - "Surface training t=16176, loss=0.02863250393420458\n", - "Surface training t=16177, loss=0.02717140782624483\n", - "Surface training t=16178, loss=0.034452859312295914\n", - "Surface training t=16179, loss=0.031388552859425545\n", - "Surface training t=16180, loss=0.027923667803406715\n", - "Surface training t=16181, loss=0.03827837482094765\n", - "Surface training t=16182, loss=0.024654727429151535\n", - "Surface training t=16183, loss=0.02474312763661146\n", - "Surface training t=16184, loss=0.03100588358938694\n", - "Surface training t=16185, loss=0.02916189655661583\n", - "Surface training t=16186, loss=0.032858360558748245\n", - "Surface training t=16187, 
loss=0.0333445193246007\n", - "Surface training t=16188, loss=0.03241423051804304\n", - "Surface training t=16189, loss=0.03300662338733673\n", - "Surface training t=16190, loss=0.027826488949358463\n", - "Surface training t=16191, loss=0.031926026567816734\n", - "Surface training t=16192, loss=0.031130761839449406\n", - "Surface training t=16193, loss=0.028623849153518677\n", - "Surface training t=16194, loss=0.021459725685417652\n", - "Surface training t=16195, loss=0.024372299201786518\n", - "Surface training t=16196, loss=0.030791189521551132\n", - "Surface training t=16197, loss=0.02742127515375614\n", - "Surface training t=16198, loss=0.028633992187678814\n", - "Surface training t=16199, loss=0.028700856491923332\n", - "Surface training t=16200, loss=0.03153786528855562\n", - "Surface training t=16201, loss=0.026249714195728302\n", - "Surface training t=16202, loss=0.039607252925634384\n", - "Surface training t=16203, loss=0.025426399894058704\n", - "Surface training t=16204, loss=0.025263466872274876\n", - "Surface training t=16205, loss=0.032185016199946404\n", - "Surface training t=16206, loss=0.030556950718164444\n", - "Surface training t=16207, loss=0.024375958368182182\n", - "Surface training t=16208, loss=0.020555729046463966\n", - "Surface training t=16209, loss=0.026593961752951145\n", - "Surface training t=16210, loss=0.029937617480754852\n", - "Surface training t=16211, loss=0.028554954566061497\n", - "Surface training t=16212, loss=0.021209669299423695\n", - "Surface training t=16213, loss=0.02732306346297264\n", - "Surface training t=16214, loss=0.02750734705477953\n", - "Surface training t=16215, loss=0.022616303525865078\n", - "Surface training t=16216, loss=0.02152147237211466\n", - "Surface training t=16217, loss=0.023656263016164303\n", - "Surface training t=16218, loss=0.021837467327713966\n", - "Surface training t=16219, loss=0.01877992507070303\n", - "Surface training t=16220, loss=0.02890403289347887\n", - "Surface training t=16221, loss=0.025018448941409588\n", - "Surface training t=16222, loss=0.021733202040195465\n", - "Surface training t=16223, loss=0.03215754218399525\n", - "Surface training t=16224, loss=0.027107037603855133\n", - "Surface training t=16225, loss=0.037088869139552116\n", - "Surface training t=16226, loss=0.051128579303622246\n", - "Surface training t=16227, loss=0.03647992666810751\n", - "Surface training t=16228, loss=0.03599393926560879\n", - "Surface training t=16229, loss=0.031462738290429115\n", - "Surface training t=16230, loss=0.03405530005693436\n", - "Surface training t=16231, loss=0.03251473046839237\n", - "Surface training t=16232, loss=0.032032933086156845\n", - "Surface training t=16233, loss=0.0290200337767601\n", - "Surface training t=16234, loss=0.02821226231753826\n", - "Surface training t=16235, loss=0.024600641801953316\n", - "Surface training t=16236, loss=0.03264179639518261\n", - "Surface training t=16237, loss=0.024240930564701557\n", - "Surface training t=16238, loss=0.03314676508307457\n", - "Surface training t=16239, loss=0.02809679601341486\n", - "Surface training t=16240, loss=0.02846828941255808\n", - "Surface training t=16241, loss=0.030958084389567375\n", - "Surface training t=16242, loss=0.02668245229870081\n", - "Surface training t=16243, loss=0.03363128378987312\n", - "Surface training t=16244, loss=0.03257635608315468\n", - "Surface training t=16245, loss=0.0322352284565568\n", - "Surface training t=16246, loss=0.033296553418040276\n", - "Surface training t=16247, loss=0.0496938768774271\n", - "Surface 
training t=16248, loss=0.033117749728262424\n", - "Surface training t=16249, loss=0.03527725860476494\n", - "Surface training t=16250, loss=0.027071748860180378\n", - "Surface training t=16251, loss=0.02533910982310772\n", - "Surface training t=16252, loss=0.023051176220178604\n", - "Surface training t=16253, loss=0.02768153790384531\n", - "Surface training t=16254, loss=0.030108816921710968\n", - "Surface training t=16255, loss=0.02648960892111063\n", - "Surface training t=16256, loss=0.029697073623538017\n", - "Surface training t=16257, loss=0.028977200388908386\n", - "Surface training t=16258, loss=0.026747181080281734\n", - "Surface training t=16259, loss=0.03106937650591135\n", - "Surface training t=16260, loss=0.03159098979085684\n", - "Surface training t=16261, loss=0.0281910989433527\n", - "Surface training t=16262, loss=0.022576906718313694\n", - "Surface training t=16263, loss=0.026606284081935883\n", - "Surface training t=16264, loss=0.021364853717386723\n", - "Surface training t=16265, loss=0.028858800418674946\n", - "Surface training t=16266, loss=0.02797351498156786\n", - "Surface training t=16267, loss=0.03572166711091995\n", - "Surface training t=16268, loss=0.033232953399419785\n", - "Surface training t=16269, loss=0.03367687202990055\n", - "Surface training t=16270, loss=0.030975385569036007\n", - "Surface training t=16271, loss=0.03478212282061577\n", - "Surface training t=16272, loss=0.038020575419068336\n", - "Surface training t=16273, loss=0.03434906993061304\n", - "Surface training t=16274, loss=0.06035969965159893\n", - "Surface training t=16275, loss=0.03481170907616615\n", - "Surface training t=16276, loss=0.03930443711578846\n", - "Surface training t=16277, loss=0.0399505952373147\n", - "Surface training t=16278, loss=0.03412921354174614\n", - "Surface training t=16279, loss=0.03225524444133043\n", - "Surface training t=16280, loss=0.028758938424289227\n", - "Surface training t=16281, loss=0.03098296094685793\n", - "Surface training t=16282, loss=0.03170999325811863\n", - "Surface training t=16283, loss=0.02097463607788086\n", - "Surface training t=16284, loss=0.023277183063328266\n", - "Surface training t=16285, loss=0.03539160639047623\n", - "Surface training t=16286, loss=0.030003413558006287\n", - "Surface training t=16287, loss=0.026159174740314484\n", - "Surface training t=16288, loss=0.03302362188696861\n", - "Surface training t=16289, loss=0.028905004262924194\n", - "Surface training t=16290, loss=0.024563239887356758\n", - "Surface training t=16291, loss=0.03851102292537689\n", - "Surface training t=16292, loss=0.02605109754949808\n", - "Surface training t=16293, loss=0.027053920552134514\n", - "Surface training t=16294, loss=0.028357340022921562\n", - "Surface training t=16295, loss=0.03522411361336708\n", - "Surface training t=16296, loss=0.030817179940640926\n", - "Surface training t=16297, loss=0.042829493060708046\n", - "Surface training t=16298, loss=0.030011242255568504\n", - "Surface training t=16299, loss=0.04391833022236824\n", - "Surface training t=16300, loss=0.029529932886362076\n", - "Surface training t=16301, loss=0.02597809676080942\n", - "Surface training t=16302, loss=0.036336785182356834\n", - "Surface training t=16303, loss=0.024504405446350574\n", - "Surface training t=16304, loss=0.021612543612718582\n", - "Surface training t=16305, loss=0.02212686836719513\n", - "Surface training t=16306, loss=0.018925901502370834\n", - "Surface training t=16307, loss=0.02112884446978569\n", - "Surface training t=16308, loss=0.01744119543582201\n", 
- "Surface training t=16309, loss=0.024576197378337383\n", - "Surface training t=16310, loss=0.025272206403315067\n", - "Surface training t=16311, loss=0.03153500705957413\n", - "Surface training t=16312, loss=0.03253350034356117\n", - "Surface training t=16313, loss=0.034479920752346516\n", - "Surface training t=16314, loss=0.03059413842856884\n", - "Surface training t=16315, loss=0.030550582334399223\n", - "Surface training t=16316, loss=0.03448196966201067\n", - "Surface training t=16317, loss=0.03355031553655863\n", - "Surface training t=16318, loss=0.029606172814965248\n", - "Surface training t=16319, loss=0.027694009244441986\n", - "Surface training t=16320, loss=0.03217262029647827\n", - "Surface training t=16321, loss=0.02738112211227417\n", - "Surface training t=16322, loss=0.015456508845090866\n", - "Surface training t=16323, loss=0.022221856750547886\n", - "Surface training t=16324, loss=0.020299552008509636\n", - "Surface training t=16325, loss=0.02341039478778839\n", - "Surface training t=16326, loss=0.023495597764849663\n", - "Surface training t=16327, loss=0.03101063333451748\n", - "Surface training t=16328, loss=0.039049070328474045\n", - "Surface training t=16329, loss=0.040386682376265526\n", - "Surface training t=16330, loss=0.03403173200786114\n", - "Surface training t=16331, loss=0.025266694836318493\n", - "Surface training t=16332, loss=0.04168766736984253\n", - "Surface training t=16333, loss=0.02479181718081236\n", - "Surface training t=16334, loss=0.03288749419152737\n", - "Surface training t=16335, loss=0.02688744757324457\n", - "Surface training t=16336, loss=0.028200454078614712\n", - "Surface training t=16337, loss=0.029928063042461872\n", - "Surface training t=16338, loss=0.024505929090082645\n", - "Surface training t=16339, loss=0.03249953221529722\n", - "Surface training t=16340, loss=0.027235706336796284\n", - "Surface training t=16341, loss=0.019878790713846684\n", - "Surface training t=16342, loss=0.023275550454854965\n", - "Surface training t=16343, loss=0.02950507216155529\n", - "Surface training t=16344, loss=0.037332212552428246\n", - "Surface training t=16345, loss=0.030597446486353874\n", - "Surface training t=16346, loss=0.026634210720658302\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=16347, loss=0.03721117973327637\n", - "Surface training t=16348, loss=0.03439240995794535\n", - "Surface training t=16349, loss=0.029073627665638924\n", - "Surface training t=16350, loss=0.03849571384489536\n", - "Surface training t=16351, loss=0.03687123954296112\n", - "Surface training t=16352, loss=0.05170613154768944\n", - "Surface training t=16353, loss=0.0333812702447176\n", - "Surface training t=16354, loss=0.029347753152251244\n", - "Surface training t=16355, loss=0.030077188275754452\n", - "Surface training t=16356, loss=0.023744781501591206\n", - "Surface training t=16357, loss=0.022419105283915997\n", - "Surface training t=16358, loss=0.021360909566283226\n", - "Surface training t=16359, loss=0.021223225630819798\n", - "Surface training t=16360, loss=0.023297586478292942\n", - "Surface training t=16361, loss=0.024020826444029808\n", - "Surface training t=16362, loss=0.022629148326814175\n", - "Surface training t=16363, loss=0.018031935207545757\n", - "Surface training t=16364, loss=0.03287927061319351\n", - "Surface training t=16365, loss=0.050177060067653656\n", - "Surface training t=16366, loss=0.04376015719026327\n", - "Surface training t=16367, loss=0.03503922559320927\n", - "Surface training t=16368, 
loss=0.030214587226510048\n", - "Surface training t=16369, loss=0.03384470194578171\n", - "Surface training t=16370, loss=0.036637745797634125\n", - "Surface training t=16371, loss=0.038258129730820656\n", - "Surface training t=16372, loss=0.036613017320632935\n", - "Surface training t=16373, loss=0.024837682023644447\n", - "Surface training t=16374, loss=0.020841455552726984\n", - "Surface training t=16375, loss=0.02388780191540718\n", - "Surface training t=16376, loss=0.026244078762829304\n", - "Surface training t=16377, loss=0.03405486326664686\n", - "Surface training t=16378, loss=0.042210400104522705\n", - "Surface training t=16379, loss=0.042180100455880165\n", - "Surface training t=16380, loss=0.04044973663985729\n", - "Surface training t=16381, loss=0.038200486451387405\n", - "Surface training t=16382, loss=0.03775922209024429\n", - "Surface training t=16383, loss=0.04090875759720802\n", - "Surface training t=16384, loss=0.04075113497674465\n", - "Surface training t=16385, loss=0.044157253578305244\n", - "Surface training t=16386, loss=0.04010230302810669\n", - "Surface training t=16387, loss=0.030727820470929146\n", - "Surface training t=16388, loss=0.025078821927309036\n", - "Surface training t=16389, loss=0.03890167735517025\n", - "Surface training t=16390, loss=0.05731850117444992\n", - "Surface training t=16391, loss=0.047247941605746746\n", - "Surface training t=16392, loss=0.062213629484176636\n", - "Surface training t=16393, loss=0.07997951284050941\n", - "Surface training t=16394, loss=0.04977695271372795\n", - "Surface training t=16395, loss=0.06608748622238636\n", - "Surface training t=16396, loss=0.045349977910518646\n", - "Surface training t=16397, loss=0.04622978717088699\n", - "Surface training t=16398, loss=0.038768148981034756\n", - "Surface training t=16399, loss=0.03213715832680464\n", - "Surface training t=16400, loss=0.032948799431324005\n", - "Surface training t=16401, loss=0.03840129263699055\n", - "Surface training t=16402, loss=0.030043775215744972\n", - "Surface training t=16403, loss=0.029009776189923286\n", - "Surface training t=16404, loss=0.028352311812341213\n", - "Surface training t=16405, loss=0.02766704186797142\n", - "Surface training t=16406, loss=0.023214426822960377\n", - "Surface training t=16407, loss=0.026928922161459923\n", - "Surface training t=16408, loss=0.022875133901834488\n", - "Surface training t=16409, loss=0.024067655205726624\n", - "Surface training t=16410, loss=0.030926394276320934\n", - "Surface training t=16411, loss=0.041408419609069824\n", - "Surface training t=16412, loss=0.028223571367561817\n", - "Surface training t=16413, loss=0.03595435991883278\n", - "Surface training t=16414, loss=0.021946237422525883\n", - "Surface training t=16415, loss=0.03397160954773426\n", - "Surface training t=16416, loss=0.0427042692899704\n", - "Surface training t=16417, loss=0.03216221369802952\n", - "Surface training t=16418, loss=0.03006597887724638\n", - "Surface training t=16419, loss=0.028279822319746017\n", - "Surface training t=16420, loss=0.02932578045874834\n", - "Surface training t=16421, loss=0.027184507809579372\n", - "Surface training t=16422, loss=0.029850424267351627\n", - "Surface training t=16423, loss=0.0277108671143651\n", - "Surface training t=16424, loss=0.03382356744259596\n", - "Surface training t=16425, loss=0.023730918299406767\n", - "Surface training t=16426, loss=0.02806045487523079\n", - "Surface training t=16427, loss=0.03226765617728233\n", - "Surface training t=16428, loss=0.021525993011891842\n", - "Surface 
training t=16429, loss=0.020934145897626877\n", - "Surface training t=16430, loss=0.022592995315790176\n", - "Surface training t=16431, loss=0.023054025135934353\n", - "Surface training t=16432, loss=0.021900855004787445\n", - "Surface training t=16433, loss=0.02234954573214054\n", - "Surface training t=16434, loss=0.02668941020965576\n", - "Surface training t=16435, loss=0.02031866181641817\n", - "Surface training t=16436, loss=0.02324147243052721\n", - "Surface training t=16437, loss=0.025440492667257786\n", - "Surface training t=16438, loss=0.02287794928997755\n", - "Surface training t=16439, loss=0.01964613702148199\n", - "Surface training t=16440, loss=0.017295848112553358\n", - "Surface training t=16441, loss=0.02060666773468256\n", - "Surface training t=16442, loss=0.018120872788131237\n", - "Surface training t=16443, loss=0.02368335612118244\n", - "Surface training t=16444, loss=0.03250245377421379\n", - "Surface training t=16445, loss=0.03165388014167547\n", - "Surface training t=16446, loss=0.028542708605527878\n", - "Surface training t=16447, loss=0.029336141422390938\n", - "Surface training t=16448, loss=0.02406575996428728\n", - "Surface training t=16449, loss=0.02804518584161997\n", - "Surface training t=16450, loss=0.03191572614014149\n", - "Surface training t=16451, loss=0.02552442066371441\n", - "Surface training t=16452, loss=0.019807720091193914\n", - "Surface training t=16453, loss=0.022682610899209976\n", - "Surface training t=16454, loss=0.023481158539652824\n", - "Surface training t=16455, loss=0.03151437547057867\n", - "Surface training t=16456, loss=0.021042809821665287\n", - "Surface training t=16457, loss=0.028220168314874172\n", - "Surface training t=16458, loss=0.02094428613781929\n", - "Surface training t=16459, loss=0.02247368823736906\n", - "Surface training t=16460, loss=0.029968634247779846\n", - "Surface training t=16461, loss=0.048866672441363335\n", - "Surface training t=16462, loss=0.038103317841887474\n", - "Surface training t=16463, loss=0.029998132959008217\n", - "Surface training t=16464, loss=0.027993088588118553\n", - "Surface training t=16465, loss=0.03518468514084816\n", - "Surface training t=16466, loss=0.0353864599019289\n", - "Surface training t=16467, loss=0.02788715809583664\n", - "Surface training t=16468, loss=0.03435869421809912\n", - "Surface training t=16469, loss=0.041823145002126694\n", - "Surface training t=16470, loss=0.061918025836348534\n", - "Surface training t=16471, loss=0.04572908952832222\n", - "Surface training t=16472, loss=0.049787646159529686\n", - "Surface training t=16473, loss=0.06223512068390846\n", - "Surface training t=16474, loss=0.04613376222550869\n", - "Surface training t=16475, loss=0.06912872567772865\n", - "Surface training t=16476, loss=0.05595632828772068\n", - "Surface training t=16477, loss=0.061487575992941856\n", - "Surface training t=16478, loss=0.05306325852870941\n", - "Surface training t=16479, loss=0.04809614457190037\n", - "Surface training t=16480, loss=0.02677128091454506\n", - "Surface training t=16481, loss=0.0364770982414484\n", - "Surface training t=16482, loss=0.04451717250049114\n", - "Surface training t=16483, loss=0.030751998536288738\n", - "Surface training t=16484, loss=0.04623355157673359\n", - "Surface training t=16485, loss=0.03093886561691761\n", - "Surface training t=16486, loss=0.0563829243183136\n", - "Surface training t=16487, loss=0.03248154558241367\n", - "Surface training t=16488, loss=0.030358964577317238\n", - "Surface training t=16489, loss=0.028988574631512165\n", - 
"Surface training t=16490, loss=0.030781203880906105\n", - "Surface training t=16491, loss=0.0360313355922699\n", - "Surface training t=16492, loss=0.030509041622281075\n", - "Surface training t=16493, loss=0.02445027232170105\n", - "Surface training t=16494, loss=0.03302035387605429\n", - "Surface training t=16495, loss=0.037453994154930115\n", - "Surface training t=16496, loss=0.046545276418328285\n", - "Surface training t=16497, loss=0.035446980968117714\n", - "Surface training t=16498, loss=0.03336193412542343\n", - "Surface training t=16499, loss=0.031077515333890915\n", - "Surface training t=16500, loss=0.03229360096156597\n", - "Surface training t=16501, loss=0.038236310705542564\n", - "Surface training t=16502, loss=0.04931769520044327\n", - "Surface training t=16503, loss=0.04025224409997463\n", - "Surface training t=16504, loss=0.037925418466329575\n", - "Surface training t=16505, loss=0.05289265140891075\n", - "Surface training t=16506, loss=0.04078955017030239\n", - "Surface training t=16507, loss=0.04305725917220116\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=16508, loss=0.037032321095466614\n", - "Surface training t=16509, loss=0.03805629163980484\n", - "Surface training t=16510, loss=0.03373712953180075\n", - "Surface training t=16511, loss=0.055404506623744965\n", - "Surface training t=16512, loss=0.036322965286672115\n", - "Surface training t=16513, loss=0.041629732586443424\n", - "Surface training t=16514, loss=0.04360599257051945\n", - "Surface training t=16515, loss=0.056493811309337616\n", - "Surface training t=16516, loss=0.03889295645058155\n", - "Surface training t=16517, loss=0.03167607355862856\n", - "Surface training t=16518, loss=0.032444680109620094\n", - "Surface training t=16519, loss=0.03494578693062067\n", - "Surface training t=16520, loss=0.037886640056967735\n", - "Surface training t=16521, loss=0.04128059931099415\n", - "Surface training t=16522, loss=0.04085224308073521\n", - "Surface training t=16523, loss=0.04518573172390461\n", - "Surface training t=16524, loss=0.039413370192050934\n", - "Surface training t=16525, loss=0.0361388148739934\n", - "Surface training t=16526, loss=0.04334353283047676\n", - "Surface training t=16527, loss=0.05251511000096798\n", - "Surface training t=16528, loss=0.033781224861741066\n", - "Surface training t=16529, loss=0.032948995009064674\n", - "Surface training t=16530, loss=0.029304049909114838\n", - "Surface training t=16531, loss=0.02695510722696781\n", - "Surface training t=16532, loss=0.03228787146508694\n", - "Surface training t=16533, loss=0.028111422434449196\n", - "Surface training t=16534, loss=0.028928584419190884\n", - "Surface training t=16535, loss=0.03911754861474037\n", - "Surface training t=16536, loss=0.049441464245319366\n", - "Surface training t=16537, loss=0.054277513176202774\n", - "Surface training t=16538, loss=0.04448366537690163\n", - "Surface training t=16539, loss=0.07158500328660011\n", - "Surface training t=16540, loss=0.05062966048717499\n", - "Surface training t=16541, loss=0.05088074877858162\n", - "Surface training t=16542, loss=0.03395059984177351\n", - "Surface training t=16543, loss=0.04140484519302845\n", - "Surface training t=16544, loss=0.02981684636324644\n", - "Surface training t=16545, loss=0.026672491803765297\n", - "Surface training t=16546, loss=0.027241414412856102\n", - "Surface training t=16547, loss=0.027548267506062984\n", - "Surface training t=16548, loss=0.021140468306839466\n", - "Surface training t=16549, 
loss=0.03369438275694847\n", - "Surface training t=16550, loss=0.029743660241365433\n", - "Surface training t=16551, loss=0.025133632123470306\n", - "Surface training t=16552, loss=0.024097809568047523\n", - "Surface training t=16553, loss=0.02586285676807165\n", - "Surface training t=16554, loss=0.022358118556439877\n", - "Surface training t=16555, loss=0.020858864299952984\n", - "Surface training t=16556, loss=0.021710840053856373\n", - "Surface training t=16557, loss=0.022948870435357094\n", - "Surface training t=16558, loss=0.029229414649307728\n", - "Surface training t=16559, loss=0.0242338040843606\n", - "Surface training t=16560, loss=0.023400388658046722\n", - "Surface training t=16561, loss=0.016742076724767685\n", - "Surface training t=16562, loss=0.020880959928035736\n", - "Surface training t=16563, loss=0.02106680255383253\n", - "Surface training t=16564, loss=0.019803408533334732\n", - "Surface training t=16565, loss=0.03331035375595093\n", - "Surface training t=16566, loss=0.03375473991036415\n", - "Surface training t=16567, loss=0.02486183773726225\n", - "Surface training t=16568, loss=0.023959770798683167\n", - "Surface training t=16569, loss=0.022297963500022888\n", - "Surface training t=16570, loss=0.02561325766146183\n", - "Surface training t=16571, loss=0.01820218190550804\n", - "Surface training t=16572, loss=0.022442461922764778\n", - "Surface training t=16573, loss=0.02303839474916458\n", - "Surface training t=16574, loss=0.021598055958747864\n", - "Surface training t=16575, loss=0.025767543353140354\n", - "Surface training t=16576, loss=0.01840376202017069\n", - "Surface training t=16577, loss=0.021949397400021553\n", - "Surface training t=16578, loss=0.02248771581798792\n", - "Surface training t=16579, loss=0.02753440197557211\n", - "Surface training t=16580, loss=0.020403752103447914\n", - "Surface training t=16581, loss=0.020535767544060946\n", - "Surface training t=16582, loss=0.022048368118703365\n", - "Surface training t=16583, loss=0.022744636982679367\n", - "Surface training t=16584, loss=0.02217222796753049\n", - "Surface training t=16585, loss=0.02835727296769619\n", - "Surface training t=16586, loss=0.022499914281070232\n", - "Surface training t=16587, loss=0.028327864594757557\n", - "Surface training t=16588, loss=0.030178780667483807\n", - "Surface training t=16589, loss=0.026355349458754063\n", - "Surface training t=16590, loss=0.024827025830745697\n", - "Surface training t=16591, loss=0.022672835737466812\n", - "Surface training t=16592, loss=0.020071073435246944\n", - "Surface training t=16593, loss=0.02945668902248144\n", - "Surface training t=16594, loss=0.031219071708619595\n", - "Surface training t=16595, loss=0.02567861881107092\n", - "Surface training t=16596, loss=0.028372477740049362\n", - "Surface training t=16597, loss=0.028391565196216106\n", - "Surface training t=16598, loss=0.02867375873029232\n", - "Surface training t=16599, loss=0.029631061479449272\n", - "Surface training t=16600, loss=0.0427066795527935\n", - "Surface training t=16601, loss=0.027454177848994732\n", - "Surface training t=16602, loss=0.035921018570661545\n", - "Surface training t=16603, loss=0.041703877970576286\n", - "Surface training t=16604, loss=0.028644603677093983\n", - "Surface training t=16605, loss=0.037413718178868294\n", - "Surface training t=16606, loss=0.028133021667599678\n", - "Surface training t=16607, loss=0.02676427084952593\n", - "Surface training t=16608, loss=0.03639199957251549\n", - "Surface training t=16609, loss=0.024807466194033623\n", - 
"Surface training t=16610, loss=0.023810449987649918\n", - "Surface training t=16611, loss=0.024295253679156303\n", - "Surface training t=16612, loss=0.03174920752644539\n", - "Surface training t=16613, loss=0.0330655612051487\n", - "Surface training t=16614, loss=0.029533461667597294\n", - "Surface training t=16615, loss=0.0207185884937644\n", - "Surface training t=16616, loss=0.02491953782737255\n", - "Surface training t=16617, loss=0.04278707690536976\n", - "Surface training t=16618, loss=0.041539013385772705\n", - "Surface training t=16619, loss=0.02700225729495287\n", - "Surface training t=16620, loss=0.03217608667910099\n", - "Surface training t=16621, loss=0.03305695950984955\n", - "Surface training t=16622, loss=0.03192208334803581\n", - "Surface training t=16623, loss=0.029609423130750656\n", - "Surface training t=16624, loss=0.03240860719233751\n", - "Surface training t=16625, loss=0.03937637433409691\n", - "Surface training t=16626, loss=0.03243590146303177\n", - "Surface training t=16627, loss=0.03766484372317791\n", - "Surface training t=16628, loss=0.03163471817970276\n", - "Surface training t=16629, loss=0.028641541488468647\n", - "Surface training t=16630, loss=0.04043937101960182\n", - "Surface training t=16631, loss=0.03024353925138712\n", - "Surface training t=16632, loss=0.0256710397079587\n", - "Surface training t=16633, loss=0.04342948831617832\n", - "Surface training t=16634, loss=0.03773460537195206\n", - "Surface training t=16635, loss=0.04488668218255043\n", - "Surface training t=16636, loss=0.045237625017762184\n", - "Surface training t=16637, loss=0.06214573234319687\n", - "Surface training t=16638, loss=0.03891785629093647\n", - "Surface training t=16639, loss=0.043078821152448654\n", - "Surface training t=16640, loss=0.029560789465904236\n", - "Surface training t=16641, loss=0.029187487438321114\n", - "Surface training t=16642, loss=0.03440050967037678\n", - "Surface training t=16643, loss=0.03416547458618879\n", - "Surface training t=16644, loss=0.03719628695398569\n", - "Surface training t=16645, loss=0.027257022447884083\n", - "Surface training t=16646, loss=0.03267100639641285\n", - "Surface training t=16647, loss=0.03050166927278042\n", - "Surface training t=16648, loss=0.034295327961444855\n", - "Surface training t=16649, loss=0.027381245978176594\n", - "Surface training t=16650, loss=0.026736771687865257\n", - "Surface training t=16651, loss=0.03162945993244648\n", - "Surface training t=16652, loss=0.03241174574941397\n", - "Surface training t=16653, loss=0.025878234766423702\n", - "Surface training t=16654, loss=0.023521721363067627\n", - "Surface training t=16655, loss=0.02514189761132002\n", - "Surface training t=16656, loss=0.03557890560477972\n", - "Surface training t=16657, loss=0.035758545622229576\n", - "Surface training t=16658, loss=0.03264770843088627\n", - "Surface training t=16659, loss=0.027767078951001167\n", - "Surface training t=16660, loss=0.026767365634441376\n", - "Surface training t=16661, loss=0.032741015776991844\n", - "Surface training t=16662, loss=0.057100383564829826\n", - "Surface training t=16663, loss=0.03761504590511322\n", - "Surface training t=16664, loss=0.042303575202822685\n", - "Surface training t=16665, loss=0.04176541790366173\n", - "Surface training t=16666, loss=0.05824187025427818\n", - "Surface training t=16667, loss=0.04320630803704262\n", - "Surface training t=16668, loss=0.053082121536135674\n", - "Surface training t=16669, loss=0.03969983942806721\n", - "Surface training t=16670, 
loss=0.039280456490814686\n", - "Surface training t=16671, loss=0.0382473636418581\n", - "Surface training t=16672, loss=0.04305766709148884\n", - "Surface training t=16673, loss=0.06253624148666859\n", - "Surface training t=16674, loss=0.044677646830677986\n", - "Surface training t=16675, loss=0.048534358851611614\n", - "Surface training t=16676, loss=0.03875587694346905\n", - "Surface training t=16677, loss=0.026761743240058422\n", - "Surface training t=16678, loss=0.030175630003213882\n", - "Surface training t=16679, loss=0.03185168467462063\n", - "Surface training t=16680, loss=0.040620118379592896\n", - "Surface training t=16681, loss=0.029322911985218525\n", - "Surface training t=16682, loss=0.03638938441872597\n", - "Surface training t=16683, loss=0.03124036081135273\n", - "Surface training t=16684, loss=0.032201423309743404\n", - "Surface training t=16685, loss=0.036827338859438896\n", - "Surface training t=16686, loss=0.03884384036064148\n", - "Surface training t=16687, loss=0.04929085448384285\n", - "Surface training t=16688, loss=0.036384815350174904\n", - "Surface training t=16689, loss=0.04012669809162617\n", - "Surface training t=16690, loss=0.03462773934006691\n", - "Surface training t=16691, loss=0.032177750021219254\n", - "Surface training t=16692, loss=0.023076494224369526\n", - "Surface training t=16693, loss=0.027329332195222378\n", - "Surface training t=16694, loss=0.027256103232502937\n", - "Surface training t=16695, loss=0.027924024499952793\n", - "Surface training t=16696, loss=0.02737865224480629\n", - "Surface training t=16697, loss=0.03069761861115694\n", - "Surface training t=16698, loss=0.024988187476992607\n", - "Surface training t=16699, loss=0.027063393965363503\n", - "Surface training t=16700, loss=0.026278101839125156\n", - "Surface training t=16701, loss=0.03063024766743183\n", - "Surface training t=16702, loss=0.026537321507930756\n", - "Surface training t=16703, loss=0.028870228677988052\n", - "Surface training t=16704, loss=0.025138321332633495\n", - "Surface training t=16705, loss=0.0311528192833066\n", - "Surface training t=16706, loss=0.02968204114586115\n", - "Surface training t=16707, loss=0.023769154213368893\n", - "Surface training t=16708, loss=0.02315420377999544\n", - "Surface training t=16709, loss=0.03048010915517807\n", - "Surface training t=16710, loss=0.028702554292976856\n", - "Surface training t=16711, loss=0.03333977051079273\n", - "Surface training t=16712, loss=0.03612964414060116\n", - "Surface training t=16713, loss=0.03689524345099926\n", - "Surface training t=16714, loss=0.03791424445807934\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=16715, loss=0.040357569232583046\n", - "Surface training t=16716, loss=0.03823794424533844\n", - "Surface training t=16717, loss=0.03182263486087322\n", - "Surface training t=16718, loss=0.0359104173257947\n", - "Surface training t=16719, loss=0.03574240207672119\n", - "Surface training t=16720, loss=0.04092278331518173\n", - "Surface training t=16721, loss=0.039333514869213104\n", - "Surface training t=16722, loss=0.030870873481035233\n", - "Surface training t=16723, loss=0.035003986209630966\n", - "Surface training t=16724, loss=0.03679851442575455\n", - "Surface training t=16725, loss=0.03251748904585838\n", - "Surface training t=16726, loss=0.029102702625095844\n", - "Surface training t=16727, loss=0.04497768171131611\n", - "Surface training t=16728, loss=0.04394981078803539\n", - "Surface training t=16729, loss=0.038782721385359764\n", - 
"Surface training t=16730, loss=0.043640635907649994\n", - "Surface training t=16731, loss=0.048251571133732796\n", - "Surface training t=16732, loss=0.0379223357886076\n", - "Surface training t=16733, loss=0.03325304202735424\n", - "Surface training t=16734, loss=0.03453715518116951\n", - "Surface training t=16735, loss=0.02702760510146618\n", - "Surface training t=16736, loss=0.02160272840410471\n", - "Surface training t=16737, loss=0.03627642057836056\n", - "Surface training t=16738, loss=0.035283831879496574\n", - "Surface training t=16739, loss=0.024826199747622013\n", - "Surface training t=16740, loss=0.0312634501606226\n", - "Surface training t=16741, loss=0.02630033530294895\n", - "Surface training t=16742, loss=0.024906408041715622\n", - "Surface training t=16743, loss=0.03785277344286442\n", - "Surface training t=16744, loss=0.03531294036656618\n", - "Surface training t=16745, loss=0.041205400601029396\n", - "Surface training t=16746, loss=0.04126616008579731\n", - "Surface training t=16747, loss=0.04851755127310753\n", - "Surface training t=16748, loss=0.04314504191279411\n", - "Surface training t=16749, loss=0.04352403245866299\n", - "Surface training t=16750, loss=0.023472920060157776\n", - "Surface training t=16751, loss=0.0316363163292408\n", - "Surface training t=16752, loss=0.02933089155703783\n", - "Surface training t=16753, loss=0.026246478781104088\n", - "Surface training t=16754, loss=0.03223979473114014\n", - "Surface training t=16755, loss=0.03328578267246485\n", - "Surface training t=16756, loss=0.02121552173048258\n", - "Surface training t=16757, loss=0.01996680162847042\n", - "Surface training t=16758, loss=0.019918072037398815\n", - "Surface training t=16759, loss=0.024510754272341728\n", - "Surface training t=16760, loss=0.021229859441518784\n", - "Surface training t=16761, loss=0.019364195875823498\n", - "Surface training t=16762, loss=0.01920162420719862\n", - "Surface training t=16763, loss=0.023452493362128735\n", - "Surface training t=16764, loss=0.028391826897859573\n", - "Surface training t=16765, loss=0.026570497080683708\n", - "Surface training t=16766, loss=0.029278071597218513\n", - "Surface training t=16767, loss=0.026736498810350895\n", - "Surface training t=16768, loss=0.025273107923567295\n", - "Surface training t=16769, loss=0.028804251924157143\n", - "Surface training t=16770, loss=0.02195405215024948\n", - "Surface training t=16771, loss=0.028176596388220787\n", - "Surface training t=16772, loss=0.023668533191084862\n", - "Surface training t=16773, loss=0.02412494830787182\n", - "Surface training t=16774, loss=0.035480461083352566\n", - "Surface training t=16775, loss=0.026274518109858036\n", - "Surface training t=16776, loss=0.029104312881827354\n", - "Surface training t=16777, loss=0.0382551085203886\n", - "Surface training t=16778, loss=0.026708736084401608\n", - "Surface training t=16779, loss=0.032131507992744446\n", - "Surface training t=16780, loss=0.04764382541179657\n", - "Surface training t=16781, loss=0.028634214773774147\n", - "Surface training t=16782, loss=0.04147365130484104\n", - "Surface training t=16783, loss=0.031087848357856274\n", - "Surface training t=16784, loss=0.03618050739169121\n", - "Surface training t=16785, loss=0.04140852019190788\n", - "Surface training t=16786, loss=0.03287195134907961\n", - "Surface training t=16787, loss=0.041175175458192825\n", - "Surface training t=16788, loss=0.03690348658710718\n", - "Surface training t=16789, loss=0.03968711756169796\n", - "Surface training t=16790, 
loss=0.04596997611224651\n", - "Surface training t=16791, loss=0.03240921162068844\n", - "Surface training t=16792, loss=0.05089152790606022\n", - "Surface training t=16793, loss=0.03552028816193342\n", - "Surface training t=16794, loss=0.030814683996140957\n", - "Surface training t=16795, loss=0.03150124754756689\n", - "Surface training t=16796, loss=0.026647505350410938\n", - "Surface training t=16797, loss=0.02723396196961403\n", - "Surface training t=16798, loss=0.03277401439845562\n", - "Surface training t=16799, loss=0.0242389515042305\n", - "Surface training t=16800, loss=0.024382933974266052\n", - "Surface training t=16801, loss=0.025620303116738796\n", - "Surface training t=16802, loss=0.02659190073609352\n", - "Surface training t=16803, loss=0.025327746756374836\n", - "Surface training t=16804, loss=0.02846681885421276\n", - "Surface training t=16805, loss=0.039652179926633835\n", - "Surface training t=16806, loss=0.025133997201919556\n", - "Surface training t=16807, loss=0.03278477303683758\n", - "Surface training t=16808, loss=0.02695626299828291\n", - "Surface training t=16809, loss=0.02473453152924776\n", - "Surface training t=16810, loss=0.027298825792968273\n", - "Surface training t=16811, loss=0.0248075183480978\n", - "Surface training t=16812, loss=0.020786000415682793\n", - "Surface training t=16813, loss=0.02733856812119484\n", - "Surface training t=16814, loss=0.025059704668819904\n", - "Surface training t=16815, loss=0.017826302908360958\n", - "Surface training t=16816, loss=0.018734272569417953\n", - "Surface training t=16817, loss=0.024889998137950897\n", - "Surface training t=16818, loss=0.0204814444296062\n", - "Surface training t=16819, loss=0.031125403009355068\n", - "Surface training t=16820, loss=0.024745911359786987\n", - "Surface training t=16821, loss=0.026746200397610664\n", - "Surface training t=16822, loss=0.029236377216875553\n", - "Surface training t=16823, loss=0.035873886197805405\n", - "Surface training t=16824, loss=0.030401872005313635\n", - "Surface training t=16825, loss=0.03262939676642418\n", - "Surface training t=16826, loss=0.036960527300834656\n", - "Surface training t=16827, loss=0.02314073219895363\n", - "Surface training t=16828, loss=0.03297483175992966\n", - "Surface training t=16829, loss=0.02416976075619459\n", - "Surface training t=16830, loss=0.03013236355036497\n", - "Surface training t=16831, loss=0.027167698368430138\n", - "Surface training t=16832, loss=0.023026066832244396\n", - "Surface training t=16833, loss=0.040956635028123856\n", - "Surface training t=16834, loss=0.02695001382380724\n", - "Surface training t=16835, loss=0.028173294849693775\n", - "Surface training t=16836, loss=0.024134954437613487\n", - "Surface training t=16837, loss=0.0219071377068758\n", - "Surface training t=16838, loss=0.02071821689605713\n", - "Surface training t=16839, loss=0.023844941519200802\n", - "Surface training t=16840, loss=0.031038555316627026\n", - "Surface training t=16841, loss=0.031003624200820923\n", - "Surface training t=16842, loss=0.030196494422852993\n", - "Surface training t=16843, loss=0.03235499747097492\n", - "Surface training t=16844, loss=0.03155623376369476\n", - "Surface training t=16845, loss=0.025444489903748035\n", - "Surface training t=16846, loss=0.022402029484510422\n", - "Surface training t=16847, loss=0.024307530373334885\n", - "Surface training t=16848, loss=0.03367782384157181\n", - "Surface training t=16849, loss=0.03172742668539286\n", - "Surface training t=16850, loss=0.024705804884433746\n", - "Surface 
training t=16851, loss=0.029965488240122795\n", - "Surface training t=16852, loss=0.024197343736886978\n", - "Surface training t=16853, loss=0.0326794758439064\n", - "Surface training t=16854, loss=0.03335465397685766\n", - "Surface training t=16855, loss=0.03530846443027258\n", - "Surface training t=16856, loss=0.03593330644071102\n", - "Surface training t=16857, loss=0.03930609114468098\n", - "Surface training t=16858, loss=0.025262516923248768\n", - "Surface training t=16859, loss=0.02131655625998974\n", - "Surface training t=16860, loss=0.024540637619793415\n", - "Surface training t=16861, loss=0.03537771292030811\n", - "Surface training t=16862, loss=0.025287600234150887\n", - "Surface training t=16863, loss=0.030342884361743927\n", - "Surface training t=16864, loss=0.0366698345169425\n", - "Surface training t=16865, loss=0.02850495558232069\n", - "Surface training t=16866, loss=0.023219910450279713\n", - "Surface training t=16867, loss=0.02803113590925932\n", - "Surface training t=16868, loss=0.024713712744414806\n", - "Surface training t=16869, loss=0.021551421843469143\n", - "Surface training t=16870, loss=0.020646817050874233\n", - "Surface training t=16871, loss=0.02118243370205164\n", - "Surface training t=16872, loss=0.026750548742711544\n", - "Surface training t=16873, loss=0.01979443058371544\n", - "Surface training t=16874, loss=0.015954388305544853\n", - "Surface training t=16875, loss=0.022792035713791847\n", - "Surface training t=16876, loss=0.027387420646846294\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=16877, loss=0.02765876892954111\n", - "Surface training t=16878, loss=0.024323039688169956\n", - "Surface training t=16879, loss=0.035161200910806656\n", - "Surface training t=16880, loss=0.036679744720458984\n", - "Surface training t=16881, loss=0.03330775536596775\n", - "Surface training t=16882, loss=0.034854414872825146\n", - "Surface training t=16883, loss=0.033413128927350044\n", - "Surface training t=16884, loss=0.028407078236341476\n", - "Surface training t=16885, loss=0.026461812667548656\n", - "Surface training t=16886, loss=0.029387153685092926\n", - "Surface training t=16887, loss=0.030324376188218594\n", - "Surface training t=16888, loss=0.026035704649984837\n", - "Surface training t=16889, loss=0.03825869970023632\n", - "Surface training t=16890, loss=0.03977174684405327\n", - "Surface training t=16891, loss=0.035825615748763084\n", - "Surface training t=16892, loss=0.04015125706791878\n", - "Surface training t=16893, loss=0.044749392196536064\n", - "Surface training t=16894, loss=0.03786090202629566\n", - "Surface training t=16895, loss=0.03539412468671799\n", - "Surface training t=16896, loss=0.024622438475489616\n", - "Surface training t=16897, loss=0.026571856811642647\n", - "Surface training t=16898, loss=0.03409089706838131\n", - "Surface training t=16899, loss=0.030315518379211426\n", - "Surface training t=16900, loss=0.034107401967048645\n", - "Surface training t=16901, loss=0.030801215209066868\n", - "Surface training t=16902, loss=0.0275934636592865\n", - "Surface training t=16903, loss=0.0332571379840374\n", - "Surface training t=16904, loss=0.048487547785043716\n", - "Surface training t=16905, loss=0.03363138623535633\n", - "Surface training t=16906, loss=0.049381762742996216\n", - "Surface training t=16907, loss=0.061726298183202744\n", - "Surface training t=16908, loss=0.04590521939098835\n", - "Surface training t=16909, loss=0.039742592722177505\n", - "Surface training t=16910, 
loss=0.03553416207432747\n", - "Surface training t=16911, loss=0.04279514402151108\n", - "Surface training t=16912, loss=0.042421722784638405\n", - "Surface training t=16913, loss=0.028827964328229427\n", - "Surface training t=16914, loss=0.03292322903871536\n", - "Surface training t=16915, loss=0.02182009071111679\n", - "Surface training t=16916, loss=0.0203335490077734\n", - "Surface training t=16917, loss=0.018471458926796913\n", - "Surface training t=16918, loss=0.02445415034890175\n", - "Surface training t=16919, loss=0.025524886325001717\n", - "Surface training t=16920, loss=0.028684048913419247\n", - "Surface training t=16921, loss=0.04056201130151749\n", - "Surface training t=16922, loss=0.03837187960743904\n", - "Surface training t=16923, loss=0.028559932485222816\n", - "Surface training t=16924, loss=0.030627604573965073\n", - "Surface training t=16925, loss=0.03283332567662001\n", - "Surface training t=16926, loss=0.03126310836523771\n", - "Surface training t=16927, loss=0.029697089456021786\n", - "Surface training t=16928, loss=0.03066536132246256\n", - "Surface training t=16929, loss=0.03421420231461525\n", - "Surface training t=16930, loss=0.02732130791991949\n", - "Surface training t=16931, loss=0.03153032809495926\n", - "Surface training t=16932, loss=0.03325449861586094\n", - "Surface training t=16933, loss=0.028117732144892216\n", - "Surface training t=16934, loss=0.045031238347291946\n", - "Surface training t=16935, loss=0.03965388610959053\n", - "Surface training t=16936, loss=0.04436380788683891\n", - "Surface training t=16937, loss=0.042777493596076965\n", - "Surface training t=16938, loss=0.06971592828631401\n", - "Surface training t=16939, loss=0.044327542185783386\n", - "Surface training t=16940, loss=0.07367116585373878\n", - "Surface training t=16941, loss=0.050083838403224945\n", - "Surface training t=16942, loss=0.043900664895772934\n", - "Surface training t=16943, loss=0.03215455636382103\n", - "Surface training t=16944, loss=0.029967631213366985\n", - "Surface training t=16945, loss=0.02525426633656025\n", - "Surface training t=16946, loss=0.029536670073866844\n", - "Surface training t=16947, loss=0.039256131276488304\n", - "Surface training t=16948, loss=0.033307408913969994\n", - "Surface training t=16949, loss=0.03125878516584635\n", - "Surface training t=16950, loss=0.027914763428270817\n", - "Surface training t=16951, loss=0.02672833949327469\n", - "Surface training t=16952, loss=0.03037348762154579\n", - "Surface training t=16953, loss=0.027397863566875458\n", - "Surface training t=16954, loss=0.021373098716139793\n", - "Surface training t=16955, loss=0.03590207453817129\n", - "Surface training t=16956, loss=0.04812307469546795\n", - "Surface training t=16957, loss=0.04762020520865917\n", - "Surface training t=16958, loss=0.042322827503085136\n", - "Surface training t=16959, loss=0.03975861705839634\n", - "Surface training t=16960, loss=0.06385424360632896\n", - "Surface training t=16961, loss=0.04186368174850941\n", - "Surface training t=16962, loss=0.035982752218842506\n", - "Surface training t=16963, loss=0.02696843631565571\n", - "Surface training t=16964, loss=0.03503398783504963\n", - "Surface training t=16965, loss=0.024782990105450153\n", - "Surface training t=16966, loss=0.03605117555707693\n", - "Surface training t=16967, loss=0.02215930912643671\n", - "Surface training t=16968, loss=0.020105593837797642\n", - "Surface training t=16969, loss=0.024187379516661167\n", - "Surface training t=16970, loss=0.027754557318985462\n", - "Surface training 
t=16971, loss=0.02575585152953863\n", - "Surface training t=16972, loss=0.025986001826822758\n", - "Surface training t=16973, loss=0.02397919725626707\n", - "Surface training t=16974, loss=0.026186369359493256\n", - "Surface training t=16975, loss=0.0375699158757925\n", - "Surface training t=16976, loss=0.02308105118572712\n", - "Surface training t=16977, loss=0.03960248455405235\n", - "Surface training t=16978, loss=0.03149121813476086\n", - "Surface training t=16979, loss=0.029224454425275326\n", - "Surface training t=16980, loss=0.04040909186005592\n", - "Surface training t=16981, loss=0.03096429631114006\n", - "Surface training t=16982, loss=0.0496795941144228\n", - "Surface training t=16983, loss=0.038942726328969\n", - "Surface training t=16984, loss=0.04870164208114147\n", - "Surface training t=16985, loss=0.04304905980825424\n", - "Surface training t=16986, loss=0.034797634929418564\n", - "Surface training t=16987, loss=0.038700833916664124\n", - "Surface training t=16988, loss=0.03174917213618755\n", - "Surface training t=16989, loss=0.03940119594335556\n", - "Surface training t=16990, loss=0.047383399680256844\n", - "Surface training t=16991, loss=0.03541935048997402\n", - "Surface training t=16992, loss=0.0430463831871748\n", - "Surface training t=16993, loss=0.03538103774189949\n", - "Surface training t=16994, loss=0.03717910312116146\n", - "Surface training t=16995, loss=0.04689935967326164\n", - "Surface training t=16996, loss=0.0371423214673996\n", - "Surface training t=16997, loss=0.03731375187635422\n", - "Surface training t=16998, loss=0.03294394351541996\n", - "Surface training t=16999, loss=0.031672630459070206\n", - "Surface training t=17000, loss=0.032572329975664616\n", - "Surface training t=17001, loss=0.04298717528581619\n", - "Surface training t=17002, loss=0.030425092205405235\n", - "Surface training t=17003, loss=0.03342638537287712\n", - "Surface training t=17004, loss=0.026509885676205158\n", - "Surface training t=17005, loss=0.023437583819031715\n", - "Surface training t=17006, loss=0.033952388912439346\n", - "Surface training t=17007, loss=0.02312003169208765\n", - "Surface training t=17008, loss=0.024500932544469833\n", - "Surface training t=17009, loss=0.03369354456663132\n", - "Surface training t=17010, loss=0.03316283505409956\n", - "Surface training t=17011, loss=0.027041655033826828\n", - "Surface training t=17012, loss=0.01988788601011038\n", - "Surface training t=17013, loss=0.023537601344287395\n", - "Surface training t=17014, loss=0.025325444526970387\n", - "Surface training t=17015, loss=0.018389392644166946\n", - "Surface training t=17016, loss=0.017671523615717888\n", - "Surface training t=17017, loss=0.022358314134180546\n", - "Surface training t=17018, loss=0.022459647618234158\n", - "Surface training t=17019, loss=0.021058913320302963\n", - "Surface training t=17020, loss=0.026171984151005745\n", - "Surface training t=17021, loss=0.02212895080447197\n", - "Surface training t=17022, loss=0.021665191277861595\n", - "Surface training t=17023, loss=0.021975521929562092\n", - "Surface training t=17024, loss=0.029833871871232986\n", - "Surface training t=17025, loss=0.03326214849948883\n", - "Surface training t=17026, loss=0.04117266647517681\n", - "Surface training t=17027, loss=0.04356170631945133\n", - "Surface training t=17028, loss=0.039002460427582264\n", - "Surface training t=17029, loss=0.0303608113899827\n", - "Surface training t=17030, loss=0.03704963997006416\n", - "Surface training t=17031, loss=0.021804346702992916\n", - "Surface 
training t=17032, loss=0.024419838562607765\n", - "Surface training t=17033, loss=0.02368124481290579\n", - "Surface training t=17034, loss=0.021743008866906166\n", - "Surface training t=17035, loss=0.028373277746140957\n", - "Surface training t=17036, loss=0.020413843914866447\n", - "Surface training t=17037, loss=0.022391186095774174\n", - "Surface training t=17038, loss=0.02573343925178051\n", - "Surface training t=17039, loss=0.025026511400938034\n", - "Surface training t=17040, loss=0.033957257866859436\n", - "Surface training t=17041, loss=0.02502672653645277\n", - "Surface training t=17042, loss=0.024151165038347244\n", - "Surface training t=17043, loss=0.024159472435712814\n", - "Surface training t=17044, loss=0.021432125009596348\n", - "Surface training t=17045, loss=0.023598353378474712\n", - "Surface training t=17046, loss=0.025566252879798412\n", - "Surface training t=17047, loss=0.027020210400223732\n", - "Surface training t=17048, loss=0.03580955043435097\n", - "Surface training t=17049, loss=0.04715399071574211\n", - "Surface training t=17050, loss=0.02805810421705246\n", - "Surface training t=17051, loss=0.025381125509738922\n", - "Surface training t=17052, loss=0.019750148057937622\n", - "Surface training t=17053, loss=0.023647678084671497\n", - "Surface training t=17054, loss=0.02511931210756302\n", - "Surface training t=17055, loss=0.030976800248026848\n", - "Surface training t=17056, loss=0.03411961626261473\n", - "Surface training t=17057, loss=0.024789374321699142\n", - "Surface training t=17058, loss=0.02348716091364622\n", - "Surface training t=17059, loss=0.020197870209813118\n", - "Surface training t=17060, loss=0.026338092982769012\n", - "Surface training t=17061, loss=0.021897779777646065\n", - "Surface training t=17062, loss=0.023446621373295784\n", - "Surface training t=17063, loss=0.02722062822431326\n", - "Surface training t=17064, loss=0.03114538360387087\n", - "Surface training t=17065, loss=0.027765359729528427\n", - "Surface training t=17066, loss=0.0297940531745553\n", - "Surface training t=17067, loss=0.026583300903439522\n", - "Surface training t=17068, loss=0.03018117882311344\n", - "Surface training t=17069, loss=0.03410704340785742\n", - "Surface training t=17070, loss=0.03070206195116043\n", - "Surface training t=17071, loss=0.02624058537185192\n", - "Surface training t=17072, loss=0.031929997727274895\n", - "Surface training t=17073, loss=0.03609892912209034\n", - "Surface training t=17074, loss=0.029154466465115547\n", - "Surface training t=17075, loss=0.043863508850336075\n", - "Surface training t=17076, loss=0.030607813969254494\n", - "Surface training t=17077, loss=0.0359798688441515\n", - "Surface training t=17078, loss=0.03388821892440319\n", - "Surface training t=17079, loss=0.02923649549484253\n", - "Surface training t=17080, loss=0.029191900976002216\n", - "Surface training t=17081, loss=0.02977347932755947\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=17082, loss=0.02247243281453848\n", - "Surface training t=17083, loss=0.024423026479780674\n", - "Surface training t=17084, loss=0.028342412784695625\n", - "Surface training t=17085, loss=0.031279501505196095\n", - "Surface training t=17086, loss=0.033174796029925346\n", - "Surface training t=17087, loss=0.02648674789816141\n", - "Surface training t=17088, loss=0.03135164640843868\n", - "Surface training t=17089, loss=0.027635948732495308\n", - "Surface training t=17090, loss=0.03149574063718319\n", - "Surface training t=17091, 
loss=0.031083212234079838\n", - "Surface training t=17092, loss=0.0316158551722765\n", - "Surface training t=17093, loss=0.03308836929500103\n", - "Surface training t=17094, loss=0.04526081308722496\n", - "Surface training t=17095, loss=0.04576255567371845\n", - "Surface training t=17096, loss=0.044194525107741356\n", - "Surface training t=17097, loss=0.04940582439303398\n", - "Surface training t=17098, loss=0.059252671897411346\n", - "Surface training t=17099, loss=0.044714558869600296\n", - "Surface training t=17100, loss=0.05558089353144169\n", - "Surface training t=17101, loss=0.05504859983921051\n", - "Surface training t=17102, loss=0.027336626313626766\n", - "Surface training t=17103, loss=0.031087389215826988\n", - "Surface training t=17104, loss=0.05146037042140961\n", - "Surface training t=17105, loss=0.03506735526025295\n", - "Surface training t=17106, loss=0.037096637301146984\n", - "Surface training t=17107, loss=0.04183725453913212\n", - "Surface training t=17108, loss=0.03616082854568958\n", - "Surface training t=17109, loss=0.04912644624710083\n", - "Surface training t=17110, loss=0.03217253368347883\n", - "Surface training t=17111, loss=0.03480349853634834\n", - "Surface training t=17112, loss=0.03471822664141655\n", - "Surface training t=17113, loss=0.02762782759964466\n", - "Surface training t=17114, loss=0.029876964166760445\n", - "Surface training t=17115, loss=0.026551813818514347\n", - "Surface training t=17116, loss=0.025202464312314987\n", - "Surface training t=17117, loss=0.02905076090246439\n", - "Surface training t=17118, loss=0.03010426089167595\n", - "Surface training t=17119, loss=0.04257766902446747\n", - "Surface training t=17120, loss=0.0341819915920496\n", - "Surface training t=17121, loss=0.03288237750530243\n", - "Surface training t=17122, loss=0.026063978672027588\n", - "Surface training t=17123, loss=0.02491435967385769\n", - "Surface training t=17124, loss=0.027993584983050823\n", - "Surface training t=17125, loss=0.0276305191218853\n", - "Surface training t=17126, loss=0.02135791163891554\n", - "Surface training t=17127, loss=0.02146988734602928\n", - "Surface training t=17128, loss=0.016454172786325216\n", - "Surface training t=17129, loss=0.02156155277043581\n", - "Surface training t=17130, loss=0.019641579128801823\n", - "Surface training t=17131, loss=0.0195221034809947\n", - "Surface training t=17132, loss=0.019159073941409588\n", - "Surface training t=17133, loss=0.02045445516705513\n", - "Surface training t=17134, loss=0.022050956264138222\n", - "Surface training t=17135, loss=0.031273314729332924\n", - "Surface training t=17136, loss=0.03433156758546829\n", - "Surface training t=17137, loss=0.02994721382856369\n", - "Surface training t=17138, loss=0.024876603856682777\n", - "Surface training t=17139, loss=0.024342975579202175\n", - "Surface training t=17140, loss=0.029107853770256042\n", - "Surface training t=17141, loss=0.0221913680434227\n", - "Surface training t=17142, loss=0.020764963701367378\n", - "Surface training t=17143, loss=0.01994580216705799\n", - "Surface training t=17144, loss=0.01774113718420267\n", - "Surface training t=17145, loss=0.02217854093760252\n", - "Surface training t=17146, loss=0.027989672496914864\n", - "Surface training t=17147, loss=0.01745706796646118\n", - "Surface training t=17148, loss=0.022597074508666992\n", - "Surface training t=17149, loss=0.02374313585460186\n", - "Surface training t=17150, loss=0.030383972451090813\n", - "Surface training t=17151, loss=0.02193899266421795\n", - "Surface training 
t=17152, loss=0.025751985609531403\n", - "Surface training t=17153, loss=0.026492342352867126\n", - "Surface training t=17154, loss=0.028702816925942898\n", - "Surface training t=17155, loss=0.026717310771346092\n", - "Surface training t=17156, loss=0.029462773352861404\n", - "Surface training t=17157, loss=0.03014540020376444\n", - "Surface training t=17158, loss=0.03131851926445961\n", - "Surface training t=17159, loss=0.0252795135602355\n", - "Surface training t=17160, loss=0.030463154427707195\n", - "Surface training t=17161, loss=0.023786778561770916\n", - "Surface training t=17162, loss=0.02288039866834879\n", - "Surface training t=17163, loss=0.02191267814487219\n", - "Surface training t=17164, loss=0.02242385968565941\n", - "Surface training t=17165, loss=0.0268024867400527\n", - "Surface training t=17166, loss=0.02132006548345089\n", - "Surface training t=17167, loss=0.02040851302444935\n", - "Surface training t=17168, loss=0.019907833077013493\n", - "Surface training t=17169, loss=0.0181005597114563\n", - "Surface training t=17170, loss=0.02250419743359089\n", - "Surface training t=17171, loss=0.01927414070814848\n", - "Surface training t=17172, loss=0.02178189903497696\n", - "Surface training t=17173, loss=0.02295565977692604\n", - "Surface training t=17174, loss=0.01702837459743023\n", - "Surface training t=17175, loss=0.01173978322185576\n", - "Surface training t=17176, loss=0.028077424503862858\n", - "Surface training t=17177, loss=0.02799897827208042\n", - "Surface training t=17178, loss=0.03452189639210701\n", - "Surface training t=17179, loss=0.062161851674318314\n", - "Surface training t=17180, loss=0.05244921613484621\n", - "Surface training t=17181, loss=0.08312484249472618\n", - "Surface training t=17182, loss=0.07111410610377789\n", - "Surface training t=17183, loss=0.04895168822258711\n", - "Surface training t=17184, loss=0.05938323773443699\n", - "Surface training t=17185, loss=0.041164133697748184\n", - "Surface training t=17186, loss=0.043485959991812706\n", - "Surface training t=17187, loss=0.042509086430072784\n", - "Surface training t=17188, loss=0.029546920210123062\n", - "Surface training t=17189, loss=0.038723746314644814\n", - "Surface training t=17190, loss=0.027436901815235615\n", - "Surface training t=17191, loss=0.02437067497521639\n", - "Surface training t=17192, loss=0.037638528272509575\n", - "Surface training t=17193, loss=0.028708034195005894\n", - "Surface training t=17194, loss=0.026659009978175163\n", - "Surface training t=17195, loss=0.02405410446226597\n", - "Surface training t=17196, loss=0.024131382815539837\n", - "Surface training t=17197, loss=0.03219777252525091\n", - "Surface training t=17198, loss=0.02828818093985319\n", - "Surface training t=17199, loss=0.03191200643777847\n", - "Surface training t=17200, loss=0.028627008199691772\n", - "Surface training t=17201, loss=0.030087693594396114\n", - "Surface training t=17202, loss=0.027251594699919224\n", - "Surface training t=17203, loss=0.030064205639064312\n", - "Surface training t=17204, loss=0.03109305165708065\n", - "Surface training t=17205, loss=0.025932098738849163\n", - "Surface training t=17206, loss=0.027135669253766537\n", - "Surface training t=17207, loss=0.02321053296327591\n", - "Surface training t=17208, loss=0.03013891726732254\n", - "Surface training t=17209, loss=0.02466829214245081\n", - "Surface training t=17210, loss=0.025438854470849037\n", - "Surface training t=17211, loss=0.02061430085450411\n", - "Surface training t=17212, loss=0.01615757355466485\n", - "Surface 
training t=17213, loss=0.02261112444102764\n", - "Surface training t=17214, loss=0.033055366948246956\n", - "Surface training t=17215, loss=0.023857714608311653\n", - "Surface training t=17216, loss=0.023780494928359985\n", - "Surface training t=17217, loss=0.02122531086206436\n", - "Surface training t=17218, loss=0.02328643761575222\n", - "Surface training t=17219, loss=0.019817231222987175\n", - "Surface training t=17220, loss=0.020547335036098957\n", - "Surface training t=17221, loss=0.020918825641274452\n", - "Surface training t=17222, loss=0.02172571513801813\n", - "Surface training t=17223, loss=0.018000943586230278\n", - "Surface training t=17224, loss=0.01709480583667755\n", - "Surface training t=17225, loss=0.017729626968503\n", - "Surface training t=17226, loss=0.02592580672353506\n", - "Surface training t=17227, loss=0.020413712598383427\n", - "Surface training t=17228, loss=0.0364536065608263\n", - "Surface training t=17229, loss=0.03614092618227005\n", - "Surface training t=17230, loss=0.02520456351339817\n", - "Surface training t=17231, loss=0.035059068351984024\n", - "Surface training t=17232, loss=0.024581169709563255\n", - "Surface training t=17233, loss=0.027792994864284992\n", - "Surface training t=17234, loss=0.021116943564265966\n", - "Surface training t=17235, loss=0.028425575233995914\n", - "Surface training t=17236, loss=0.024600493721663952\n", - "Surface training t=17237, loss=0.026084945537149906\n", - "Surface training t=17238, loss=0.02810743637382984\n", - "Surface training t=17239, loss=0.022966565564274788\n", - "Surface training t=17240, loss=0.028303146362304688\n", - "Surface training t=17241, loss=0.023242074996232986\n", - "Surface training t=17242, loss=0.026671605184674263\n", - "Surface training t=17243, loss=0.02577152941375971\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=17244, loss=0.02605059277266264\n", - "Surface training t=17245, loss=0.027531618252396584\n", - "Surface training t=17246, loss=0.02986212819814682\n", - "Surface training t=17247, loss=0.022043670527637005\n", - "Surface training t=17248, loss=0.029899895191192627\n", - "Surface training t=17249, loss=0.036367734894156456\n", - "Surface training t=17250, loss=0.03407189063727856\n", - "Surface training t=17251, loss=0.028043786995112896\n", - "Surface training t=17252, loss=0.03420039638876915\n", - "Surface training t=17253, loss=0.035124488174915314\n", - "Surface training t=17254, loss=0.034211342222988605\n", - "Surface training t=17255, loss=0.025532438419759274\n", - "Surface training t=17256, loss=0.029758643358945847\n", - "Surface training t=17257, loss=0.031846167519688606\n", - "Surface training t=17258, loss=0.03441682271659374\n", - "Surface training t=17259, loss=0.03714477829635143\n", - "Surface training t=17260, loss=0.039335738867521286\n", - "Surface training t=17261, loss=0.031280904076993465\n", - "Surface training t=17262, loss=0.04949171468615532\n", - "Surface training t=17263, loss=0.033817075192928314\n", - "Surface training t=17264, loss=0.03066884446889162\n", - "Surface training t=17265, loss=0.033940753899514675\n", - "Surface training t=17266, loss=0.022294062189757824\n", - "Surface training t=17267, loss=0.01916955504566431\n", - "Surface training t=17268, loss=0.01997498981654644\n", - "Surface training t=17269, loss=0.018000179901719093\n", - "Surface training t=17270, loss=0.022327310405671597\n", - "Surface training t=17271, loss=0.02191033400595188\n", - "Surface training t=17272, 
loss=0.024729350581765175\n", - "Surface training t=17273, loss=0.02741761226207018\n", - "Surface training t=17274, loss=0.026507890783250332\n", - "Surface training t=17275, loss=0.02615637518465519\n", - "Surface training t=17276, loss=0.03423935920000076\n", - "Surface training t=17277, loss=0.03170820791274309\n", - "Surface training t=17278, loss=0.025905005633831024\n", - "Surface training t=17279, loss=0.023466680198907852\n", - "Surface training t=17280, loss=0.024066012352705002\n", - "Surface training t=17281, loss=0.02781268861144781\n", - "Surface training t=17282, loss=0.038089148700237274\n", - "Surface training t=17283, loss=0.03316027671098709\n", - "Surface training t=17284, loss=0.02158122882246971\n", - "Surface training t=17285, loss=0.030408106744289398\n", - "Surface training t=17286, loss=0.030793704092502594\n", - "Surface training t=17287, loss=0.028063330333679914\n", - "Surface training t=17288, loss=0.06014002487063408\n", - "Surface training t=17289, loss=0.05016695708036423\n", - "Surface training t=17290, loss=0.05808980576694012\n", - "Surface training t=17291, loss=0.0512368306517601\n", - "Surface training t=17292, loss=0.03970503155142069\n", - "Surface training t=17293, loss=0.03933372162282467\n", - "Surface training t=17294, loss=0.04556090570986271\n", - "Surface training t=17295, loss=0.04879115708172321\n", - "Surface training t=17296, loss=0.04041684605181217\n", - "Surface training t=17297, loss=0.03921999875456095\n", - "Surface training t=17298, loss=0.05158640258014202\n", - "Surface training t=17299, loss=0.03104546293616295\n", - "Surface training t=17300, loss=0.028770477510988712\n", - "Surface training t=17301, loss=0.02076564822345972\n", - "Surface training t=17302, loss=0.026892442256212234\n", - "Surface training t=17303, loss=0.025255324319005013\n", - "Surface training t=17304, loss=0.021508402191102505\n", - "Surface training t=17305, loss=0.027071598917245865\n", - "Surface training t=17306, loss=0.03528414200991392\n", - "Surface training t=17307, loss=0.03400981053709984\n", - "Surface training t=17308, loss=0.04491078108549118\n", - "Surface training t=17309, loss=0.04156409204006195\n", - "Surface training t=17310, loss=0.04183636233210564\n", - "Surface training t=17311, loss=0.03364076837897301\n", - "Surface training t=17312, loss=0.03442357573658228\n", - "Surface training t=17313, loss=0.037673795595765114\n", - "Surface training t=17314, loss=0.02974405325949192\n", - "Surface training t=17315, loss=0.042719632387161255\n", - "Surface training t=17316, loss=0.03586006909608841\n", - "Surface training t=17317, loss=0.02997852023690939\n", - "Surface training t=17318, loss=0.030973791144788265\n", - "Surface training t=17319, loss=0.021326661109924316\n", - "Surface training t=17320, loss=0.023540837690234184\n", - "Surface training t=17321, loss=0.020803475752472878\n", - "Surface training t=17322, loss=0.025364653207361698\n", - "Surface training t=17323, loss=0.02068957034498453\n", - "Surface training t=17324, loss=0.023408123292028904\n", - "Surface training t=17325, loss=0.027803567238152027\n", - "Surface training t=17326, loss=0.02300045546144247\n", - "Surface training t=17327, loss=0.02185802161693573\n", - "Surface training t=17328, loss=0.022068369202315807\n", - "Surface training t=17329, loss=0.02255604090169072\n", - "Surface training t=17330, loss=0.025095969438552856\n", - "Surface training t=17331, loss=0.021873432211577892\n", - "Surface training t=17332, loss=0.037093931809067726\n", - "Surface training 
t=17333, loss=0.027438275516033173\n", - "Surface training t=17334, loss=0.033948495984077454\n", - "Surface training t=17335, loss=0.033896422013640404\n", - "Surface training t=17336, loss=0.03255616594105959\n", - "Surface training t=17337, loss=0.03428614791482687\n", - "Surface training t=17338, loss=0.031376760452985764\n", - "Surface training t=17339, loss=0.03308518044650555\n", - "Surface training t=17340, loss=0.029670956544578075\n", - "Surface training t=17341, loss=0.027185263112187386\n", - "Surface training t=17342, loss=0.02695541176944971\n", - "Surface training t=17343, loss=0.024713745340704918\n", - "Surface training t=17344, loss=0.019546481780707836\n", - "Surface training t=17345, loss=0.034957529976964\n", - "Surface training t=17346, loss=0.03084386233240366\n", - "Surface training t=17347, loss=0.030164132826030254\n", - "Surface training t=17348, loss=0.049015119671821594\n", - "Surface training t=17349, loss=0.02792464569211006\n", - "Surface training t=17350, loss=0.04035970754921436\n", - "Surface training t=17351, loss=0.043814195320010185\n", - "Surface training t=17352, loss=0.02797622885555029\n", - "Surface training t=17353, loss=0.02833909634500742\n", - "Surface training t=17354, loss=0.03632688522338867\n", - "Surface training t=17355, loss=0.031053196638822556\n", - "Surface training t=17356, loss=0.03287823311984539\n", - "Surface training t=17357, loss=0.03038759157061577\n", - "Surface training t=17358, loss=0.026082918047904968\n", - "Surface training t=17359, loss=0.030177125707268715\n", - "Surface training t=17360, loss=0.023521323688328266\n", - "Surface training t=17361, loss=0.04834175854921341\n", - "Surface training t=17362, loss=0.03699565306305885\n", - "Surface training t=17363, loss=0.03713699243962765\n", - "Surface training t=17364, loss=0.049972113221883774\n", - "Surface training t=17365, loss=0.045376451686024666\n", - "Surface training t=17366, loss=0.04121929407119751\n", - "Surface training t=17367, loss=0.04389025643467903\n", - "Surface training t=17368, loss=0.03064274787902832\n", - "Surface training t=17369, loss=0.028814426623284817\n", - "Surface training t=17370, loss=0.036362248472869396\n", - "Surface training t=17371, loss=0.028727836906909943\n", - "Surface training t=17372, loss=0.03543868288397789\n", - "Surface training t=17373, loss=0.035285837948322296\n", - "Surface training t=17374, loss=0.026346324011683464\n", - "Surface training t=17375, loss=0.03379246033728123\n", - "Surface training t=17376, loss=0.029687292873859406\n", - "Surface training t=17377, loss=0.03100376483052969\n", - "Surface training t=17378, loss=0.03068633656948805\n", - "Surface training t=17379, loss=0.03716029692441225\n", - "Surface training t=17380, loss=0.02160164900124073\n", - "Surface training t=17381, loss=0.02305548917502165\n", - "Surface training t=17382, loss=0.022183622233569622\n", - "Surface training t=17383, loss=0.024379306472837925\n", - "Surface training t=17384, loss=0.01898077502846718\n", - "Surface training t=17385, loss=0.02253334131091833\n", - "Surface training t=17386, loss=0.017033033072948456\n", - "Surface training t=17387, loss=0.018968453630805016\n", - "Surface training t=17388, loss=0.02484534401446581\n", - "Surface training t=17389, loss=0.02244679257273674\n", - "Surface training t=17390, loss=0.024722264148294926\n", - "Surface training t=17391, loss=0.02691001445055008\n", - "Surface training t=17392, loss=0.027773701585829258\n", - "Surface training t=17393, loss=0.0242469422519207\n", - "Surface 
training t=17394, loss=0.024854331277310848\n", - "Surface training t=17395, loss=0.025221210904419422\n", - "Surface training t=17396, loss=0.02723296359181404\n", - "Surface training t=17397, loss=0.02934971544891596\n", - "Surface training t=17398, loss=0.019878611899912357\n", - "Surface training t=17399, loss=0.02974790334701538\n", - "Surface training t=17400, loss=0.021270095370709896\n", - "Surface training t=17401, loss=0.023620611988008022\n", - "Surface training t=17402, loss=0.030624414794147015\n", - "Surface training t=17403, loss=0.03011615015566349\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=17404, loss=0.029288794845342636\n", - "Surface training t=17405, loss=0.026573571376502514\n", - "Surface training t=17406, loss=0.02612667065113783\n", - "Surface training t=17407, loss=0.024883697740733624\n", - "Surface training t=17408, loss=0.021583212539553642\n", - "Surface training t=17409, loss=0.022355543449521065\n", - "Surface training t=17410, loss=0.02185557223856449\n", - "Surface training t=17411, loss=0.021544821560382843\n", - "Surface training t=17412, loss=0.02396862395107746\n", - "Surface training t=17413, loss=0.026456640101969242\n", - "Surface training t=17414, loss=0.02629345841705799\n", - "Surface training t=17415, loss=0.022769669070839882\n", - "Surface training t=17416, loss=0.02554182428866625\n", - "Surface training t=17417, loss=0.028730152174830437\n", - "Surface training t=17418, loss=0.029226994141936302\n", - "Surface training t=17419, loss=0.034919168800115585\n", - "Surface training t=17420, loss=0.040198273956775665\n", - "Surface training t=17421, loss=0.0366192851215601\n", - "Surface training t=17422, loss=0.03680134750902653\n", - "Surface training t=17423, loss=0.029743067920207977\n", - "Surface training t=17424, loss=0.030151653103530407\n", - "Surface training t=17425, loss=0.030220860615372658\n", - "Surface training t=17426, loss=0.025801338255405426\n", - "Surface training t=17427, loss=0.02390120830386877\n", - "Surface training t=17428, loss=0.02333883009850979\n", - "Surface training t=17429, loss=0.022978007793426514\n", - "Surface training t=17430, loss=0.023467829450964928\n", - "Surface training t=17431, loss=0.021858325228095055\n", - "Surface training t=17432, loss=0.01996212638914585\n", - "Surface training t=17433, loss=0.019341139122843742\n", - "Surface training t=17434, loss=0.024320470169186592\n", - "Surface training t=17435, loss=0.018118552397936583\n", - "Surface training t=17436, loss=0.013811786659061909\n", - "Surface training t=17437, loss=0.019473441876471043\n", - "Surface training t=17438, loss=0.023122533224523067\n", - "Surface training t=17439, loss=0.022452996112406254\n", - "Surface training t=17440, loss=0.023817971348762512\n", - "Surface training t=17441, loss=0.02803091984242201\n", - "Surface training t=17442, loss=0.032123442739248276\n", - "Surface training t=17443, loss=0.037432342767715454\n", - "Surface training t=17444, loss=0.02807279210537672\n", - "Surface training t=17445, loss=0.03692655637860298\n", - "Surface training t=17446, loss=0.028703399002552032\n", - "Surface training t=17447, loss=0.037786396220326424\n", - "Surface training t=17448, loss=0.03559288941323757\n", - "Surface training t=17449, loss=0.03140243887901306\n", - "Surface training t=17450, loss=0.03936811164021492\n", - "Surface training t=17451, loss=0.02735370397567749\n", - "Surface training t=17452, loss=0.03264207765460014\n", - "Surface training t=17453, 
loss=0.048965588212013245\n", - "Surface training t=17454, loss=0.036356471478939056\n", - "Surface training t=17455, loss=0.04529428295791149\n", - "Surface training t=17456, loss=0.03271981794387102\n", - "Surface training t=17457, loss=0.031424734741449356\n", - "Surface training t=17458, loss=0.03489735163748264\n", - "Surface training t=17459, loss=0.02999974973499775\n", - "Surface training t=17460, loss=0.022771849762648344\n", - "Surface training t=17461, loss=0.03623300790786743\n", - "Surface training t=17462, loss=0.028121622279286385\n", - "Surface training t=17463, loss=0.034115809947252274\n", - "Surface training t=17464, loss=0.029534831643104553\n", - "Surface training t=17465, loss=0.030095694586634636\n", - "Surface training t=17466, loss=0.03187209367752075\n", - "Surface training t=17467, loss=0.0303405849263072\n", - "Surface training t=17468, loss=0.025636183097958565\n", - "Surface training t=17469, loss=0.04186211712658405\n", - "Surface training t=17470, loss=0.030989577062427998\n", - "Surface training t=17471, loss=0.03913174569606781\n", - "Surface training t=17472, loss=0.02910808566957712\n", - "Surface training t=17473, loss=0.023875162936747074\n", - "Surface training t=17474, loss=0.03362675942480564\n", - "Surface training t=17475, loss=0.03319607861340046\n", - "Surface training t=17476, loss=0.03181492164731026\n", - "Surface training t=17477, loss=0.036324264481663704\n", - "Surface training t=17478, loss=0.03348710108548403\n", - "Surface training t=17479, loss=0.025055231526494026\n", - "Surface training t=17480, loss=0.021988241001963615\n", - "Surface training t=17481, loss=0.019034285563975573\n", - "Surface training t=17482, loss=0.04485262557864189\n", - "Surface training t=17483, loss=0.03435799293220043\n", - "Surface training t=17484, loss=0.049358218908309937\n", - "Surface training t=17485, loss=0.0370942335575819\n", - "Surface training t=17486, loss=0.05476042442023754\n", - "Surface training t=17487, loss=0.035403928719460964\n", - "Surface training t=17488, loss=0.03762722574174404\n", - "Surface training t=17489, loss=0.04673788137733936\n", - "Surface training t=17490, loss=0.043769754469394684\n", - "Surface training t=17491, loss=0.05654396302998066\n", - "Surface training t=17492, loss=0.03378692455589771\n", - "Surface training t=17493, loss=0.04600514471530914\n", - "Surface training t=17494, loss=0.02220500260591507\n", - "Surface training t=17495, loss=0.03243453986942768\n", - "Surface training t=17496, loss=0.034285543486475945\n", - "Surface training t=17497, loss=0.027494842186570168\n", - "Surface training t=17498, loss=0.023015502840280533\n", - "Surface training t=17499, loss=0.023740166798233986\n", - "Surface training t=17500, loss=0.01828326191753149\n", - "Surface training t=17501, loss=0.029649595730006695\n", - "Surface training t=17502, loss=0.027170089073479176\n", - "Surface training t=17503, loss=0.022758291102945805\n", - "Surface training t=17504, loss=0.02283606957644224\n", - "Surface training t=17505, loss=0.02173034008592367\n", - "Surface training t=17506, loss=0.02023005113005638\n", - "Surface training t=17507, loss=0.020137115381658077\n", - "Surface training t=17508, loss=0.020177742466330528\n", - "Surface training t=17509, loss=0.016783404164016247\n", - "Surface training t=17510, loss=0.025791875086724758\n", - "Surface training t=17511, loss=0.023154704831540585\n", - "Surface training t=17512, loss=0.01984686590731144\n", - "Surface training t=17513, loss=0.023560971952974796\n", - "Surface 
training t=17514, loss=0.02570926584303379\n", - "Surface training t=17515, loss=0.027222711592912674\n", - "Surface training t=17516, loss=0.02328217215836048\n", - "Surface training t=17517, loss=0.033819315023720264\n", - "Surface training t=17518, loss=0.0338884349912405\n", - "Surface training t=17519, loss=0.03846179321408272\n", - "Surface training t=17520, loss=0.03057977557182312\n", - "Surface training t=17521, loss=0.044560788199305534\n", - "Surface training t=17522, loss=0.034255487844347954\n", - "Surface training t=17523, loss=0.03216410428285599\n", - "Surface training t=17524, loss=0.04245787858963013\n", - "Surface training t=17525, loss=0.027184723876416683\n", - "Surface training t=17526, loss=0.026112492196261883\n", - "Surface training t=17527, loss=0.03577894903719425\n", - "Surface training t=17528, loss=0.02933600451797247\n", - "Surface training t=17529, loss=0.03460825886577368\n", - "Surface training t=17530, loss=0.03259773273020983\n", - "Surface training t=17531, loss=0.030092256143689156\n", - "Surface training t=17532, loss=0.03914085403084755\n", - "Surface training t=17533, loss=0.03388563450425863\n", - "Surface training t=17534, loss=0.032845488749444485\n", - "Surface training t=17535, loss=0.03531728591769934\n", - "Surface training t=17536, loss=0.030037234537303448\n", - "Surface training t=17537, loss=0.03253170847892761\n", - "Surface training t=17538, loss=0.026293904520571232\n", - "Surface training t=17539, loss=0.02852644957602024\n", - "Surface training t=17540, loss=0.03542608395218849\n", - "Surface training t=17541, loss=0.022754850797355175\n", - "Surface training t=17542, loss=0.02507386263459921\n", - "Surface training t=17543, loss=0.03705819230526686\n", - "Surface training t=17544, loss=0.028524824418127537\n", - "Surface training t=17545, loss=0.021732093766331673\n", - "Surface training t=17546, loss=0.02330902684479952\n", - "Surface training t=17547, loss=0.020023848861455917\n", - "Surface training t=17548, loss=0.022290872409939766\n", - "Surface training t=17549, loss=0.019484235905110836\n", - "Surface training t=17550, loss=0.027145199477672577\n", - "Surface training t=17551, loss=0.018386716954410076\n", - "Surface training t=17552, loss=0.01469599548727274\n", - "Surface training t=17553, loss=0.023090672679245472\n", - "Surface training t=17554, loss=0.0215763496235013\n", - "Surface training t=17555, loss=0.01796597708016634\n", - "Surface training t=17556, loss=0.02365140337496996\n", - "Surface training t=17557, loss=0.0204488025046885\n", - "Surface training t=17558, loss=0.017794804647564888\n", - "Surface training t=17559, loss=0.0193985803052783\n", - "Surface training t=17560, loss=0.02296259067952633\n", - "Surface training t=17561, loss=0.02533707208931446\n", - "Surface training t=17562, loss=0.022917755879461765\n", - "Surface training t=17563, loss=0.016173340380191803\n", - "Surface training t=17564, loss=0.020822900347411633\n", - "Surface training t=17565, loss=0.023143955506384373\n", - "Surface training t=17566, loss=0.017038484569638968\n", - "Surface training t=17567, loss=0.02563614957034588\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=17568, loss=0.0170155824162066\n", - "Surface training t=17569, loss=0.02449630480259657\n", - "Surface training t=17570, loss=0.01946327742189169\n", - "Surface training t=17571, loss=0.018318680115044117\n", - "Surface training t=17572, loss=0.017633951269090176\n", - "Surface training t=17573, 
loss=0.01927352510392666\n", - "Surface training t=17574, loss=0.0184842050075531\n", - "Surface training t=17575, loss=0.019810297526419163\n", - "Surface training t=17576, loss=0.024507397785782814\n", - "Surface training t=17577, loss=0.021259482018649578\n", - "Surface training t=17578, loss=0.020144442096352577\n", - "Surface training t=17579, loss=0.01955909002572298\n", - "Surface training t=17580, loss=0.019617974758148193\n", - "Surface training t=17581, loss=0.01917885895818472\n", - "Surface training t=17582, loss=0.021656356751918793\n", - "Surface training t=17583, loss=0.02280275709927082\n", - "Surface training t=17584, loss=0.03364754468202591\n", - "Surface training t=17585, loss=0.04221176728606224\n", - "Surface training t=17586, loss=0.03309463895857334\n", - "Surface training t=17587, loss=0.02663112711161375\n", - "Surface training t=17588, loss=0.023622057400643826\n", - "Surface training t=17589, loss=0.024714178405702114\n", - "Surface training t=17590, loss=0.01645886106416583\n", - "Surface training t=17591, loss=0.02569871675223112\n", - "Surface training t=17592, loss=0.02233356051146984\n", - "Surface training t=17593, loss=0.02388924639672041\n", - "Surface training t=17594, loss=0.02640093769878149\n", - "Surface training t=17595, loss=0.027448758482933044\n", - "Surface training t=17596, loss=0.019103343598544598\n", - "Surface training t=17597, loss=0.02120701316744089\n", - "Surface training t=17598, loss=0.0253507848829031\n", - "Surface training t=17599, loss=0.02816780097782612\n", - "Surface training t=17600, loss=0.02445882186293602\n", - "Surface training t=17601, loss=0.02421383000910282\n", - "Surface training t=17602, loss=0.03472355008125305\n", - "Surface training t=17603, loss=0.021693465299904346\n", - "Surface training t=17604, loss=0.031860326416790485\n", - "Surface training t=17605, loss=0.0237431637942791\n", - "Surface training t=17606, loss=0.02899664919823408\n", - "Surface training t=17607, loss=0.030477521009743214\n", - "Surface training t=17608, loss=0.022762347012758255\n", - "Surface training t=17609, loss=0.02701407205313444\n", - "Surface training t=17610, loss=0.033366478979587555\n", - "Surface training t=17611, loss=0.028277648612856865\n", - "Surface training t=17612, loss=0.030387308448553085\n", - "Surface training t=17613, loss=0.02522411197423935\n", - "Surface training t=17614, loss=0.03202556073665619\n", - "Surface training t=17615, loss=0.0333529831841588\n", - "Surface training t=17616, loss=0.02785959281027317\n", - "Surface training t=17617, loss=0.02717493660748005\n", - "Surface training t=17618, loss=0.024716636165976524\n", - "Surface training t=17619, loss=0.030892128124833107\n", - "Surface training t=17620, loss=0.02634697314351797\n", - "Surface training t=17621, loss=0.023659583181142807\n", - "Surface training t=17622, loss=0.024977369233965874\n", - "Surface training t=17623, loss=0.0380451213568449\n", - "Surface training t=17624, loss=0.03429495356976986\n", - "Surface training t=17625, loss=0.03485463932156563\n", - "Surface training t=17626, loss=0.03058399073779583\n", - "Surface training t=17627, loss=0.030028355307877064\n", - "Surface training t=17628, loss=0.03475754614919424\n", - "Surface training t=17629, loss=0.045924099162220955\n", - "Surface training t=17630, loss=0.04497847147285938\n", - "Surface training t=17631, loss=0.038035159930586815\n", - "Surface training t=17632, loss=0.030179735273122787\n", - "Surface training t=17633, loss=0.041191257536411285\n", - "Surface training 
t=17634, loss=0.025457076728343964\n", - "Surface training t=17635, loss=0.030999677255749702\n", - "Surface training t=17636, loss=0.03963364660739899\n", - "Surface training t=17637, loss=0.03148413635790348\n", - "Surface training t=17638, loss=0.04543761722743511\n", - "Surface training t=17639, loss=0.02875742968171835\n", - "Surface training t=17640, loss=0.030702635645866394\n", - "Surface training t=17641, loss=0.035210736095905304\n", - "Surface training t=17642, loss=0.026896348223090172\n", - "Surface training t=17643, loss=0.0355672026053071\n", - "Surface training t=17644, loss=0.050154680386185646\n", - "Surface training t=17645, loss=0.03685635328292847\n", - "Surface training t=17646, loss=0.03122538421303034\n", - "Surface training t=17647, loss=0.029534753412008286\n", - "Surface training t=17648, loss=0.047543833032250404\n", - "Surface training t=17649, loss=0.03773570992052555\n", - "Surface training t=17650, loss=0.03515254147350788\n", - "Surface training t=17651, loss=0.027783245779573917\n", - "Surface training t=17652, loss=0.032386887818574905\n", - "Surface training t=17653, loss=0.0325942886993289\n", - "Surface training t=17654, loss=0.025035738945007324\n", - "Surface training t=17655, loss=0.03590282425284386\n", - "Surface training t=17656, loss=0.027324208058416843\n", - "Surface training t=17657, loss=0.02529223170131445\n", - "Surface training t=17658, loss=0.02399932872503996\n", - "Surface training t=17659, loss=0.021037892438471317\n", - "Surface training t=17660, loss=0.018217571545392275\n", - "Surface training t=17661, loss=0.02349824644625187\n", - "Surface training t=17662, loss=0.018926715943962336\n", - "Surface training t=17663, loss=0.022877931594848633\n", - "Surface training t=17664, loss=0.020449617877602577\n", - "Surface training t=17665, loss=0.024448972195386887\n", - "Surface training t=17666, loss=0.020868822932243347\n", - "Surface training t=17667, loss=0.02310256566852331\n", - "Surface training t=17668, loss=0.026317383162677288\n", - "Surface training t=17669, loss=0.024005049839615822\n", - "Surface training t=17670, loss=0.022026493214070797\n", - "Surface training t=17671, loss=0.01904072519391775\n", - "Surface training t=17672, loss=0.023670569993555546\n", - "Surface training t=17673, loss=0.03796030953526497\n", - "Surface training t=17674, loss=0.03302597068250179\n", - "Surface training t=17675, loss=0.03975392132997513\n", - "Surface training t=17676, loss=0.04236591421067715\n", - "Surface training t=17677, loss=0.03999614156782627\n", - "Surface training t=17678, loss=0.025914967991411686\n", - "Surface training t=17679, loss=0.03516710549592972\n", - "Surface training t=17680, loss=0.03035672288388014\n", - "Surface training t=17681, loss=0.028693057131022215\n", - "Surface training t=17682, loss=0.03436463791877031\n", - "Surface training t=17683, loss=0.02440840657800436\n", - "Surface training t=17684, loss=0.025732239708304405\n", - "Surface training t=17685, loss=0.018170285038650036\n", - "Surface training t=17686, loss=0.023150810040533543\n", - "Surface training t=17687, loss=0.02342415601015091\n", - "Surface training t=17688, loss=0.037733010947704315\n", - "Surface training t=17689, loss=0.03090058360248804\n", - "Surface training t=17690, loss=0.03305715508759022\n", - "Surface training t=17691, loss=0.03309973794966936\n", - "Surface training t=17692, loss=0.0305550554767251\n", - "Surface training t=17693, loss=0.025747214443981647\n", - "Surface training t=17694, loss=0.030545050278306007\n", - 
"Surface training t=17695, loss=0.023376737721264362\n", - "Surface training t=17696, loss=0.03251137025654316\n", - "Surface training t=17697, loss=0.028675026260316372\n", - "Surface training t=17698, loss=0.03383418545126915\n", - "Surface training t=17699, loss=0.03529137372970581\n", - "Surface training t=17700, loss=0.035067118704319\n", - "Surface training t=17701, loss=0.038909804075956345\n", - "Surface training t=17702, loss=0.03161101136356592\n", - "Surface training t=17703, loss=0.031214511953294277\n", - "Surface training t=17704, loss=0.03007975034415722\n", - "Surface training t=17705, loss=0.027529625222086906\n", - "Surface training t=17706, loss=0.022912712767720222\n", - "Surface training t=17707, loss=0.021745421923696995\n", - "Surface training t=17708, loss=0.01882587093859911\n", - "Surface training t=17709, loss=0.021903540939092636\n", - "Surface training t=17710, loss=0.01650030631572008\n", - "Surface training t=17711, loss=0.014988831710070372\n", - "Surface training t=17712, loss=0.022638323716819286\n", - "Surface training t=17713, loss=0.031124775297939777\n", - "Surface training t=17714, loss=0.031372408382594585\n", - "Surface training t=17715, loss=0.03236761316657066\n", - "Surface training t=17716, loss=0.02835831791162491\n", - "Surface training t=17717, loss=0.03731299750506878\n", - "Surface training t=17718, loss=0.05453605204820633\n", - "Surface training t=17719, loss=0.03495601378381252\n", - "Surface training t=17720, loss=0.03968465328216553\n", - "Surface training t=17721, loss=0.03706164471805096\n", - "Surface training t=17722, loss=0.04377802088856697\n", - "Surface training t=17723, loss=0.05560874752700329\n", - "Surface training t=17724, loss=0.03220159187912941\n", - "Surface training t=17725, loss=0.04003124125301838\n", - "Surface training t=17726, loss=0.04401226155459881\n", - "Surface training t=17727, loss=0.04407153092324734\n", - "Surface training t=17728, loss=0.03041328862309456\n", - "Surface training t=17729, loss=0.029502195306122303\n", - "Surface training t=17730, loss=0.03543359600007534\n", - "Surface training t=17731, loss=0.029280662536621094\n", - "Surface training t=17732, loss=0.028593353927135468\n", - "Surface training t=17733, loss=0.024006841704249382\n", - "Surface training t=17734, loss=0.026371633633971214\n", - "Surface training t=17735, loss=0.020354412496089935\n", - "Surface training t=17736, loss=0.02292540017515421\n", - "Surface training t=17737, loss=0.033205388113856316\n", - "Surface training t=17738, loss=0.03991683945059776\n", - "Surface training t=17739, loss=0.05543776787817478\n", - "Surface training t=17740, loss=0.04458486568182707\n", - "Surface training t=17741, loss=0.05127907544374466\n", - "Surface training t=17742, loss=0.06551926583051682\n", - "Surface training t=17743, loss=0.04454567842185497\n", - "Surface training t=17744, loss=0.0409510750323534\n", - "Surface training t=17745, loss=0.029974041506648064\n", - "Surface training t=17746, loss=0.06429845467209816\n", - "Surface training t=17747, loss=0.04788583144545555\n", - "Surface training t=17748, loss=0.06159549206495285\n", - "Surface training t=17749, loss=0.0576750785112381\n", - "Surface training t=17750, loss=0.042414652183651924\n", - "Surface training t=17751, loss=0.05094937141984701\n", - "Surface training t=17752, loss=0.0631623063236475\n", - "Surface training t=17753, loss=0.0513322539627552\n", - "Surface training t=17754, loss=0.0457290718331933\n", - "Surface training t=17755, loss=0.03910844400525093\n", - 
"Surface training t=17756, loss=0.03757173381745815\n", - "Surface training t=17757, loss=0.03211945202201605\n", - "Surface training t=17758, loss=0.023888584226369858\n", - "Surface training t=17759, loss=0.027682323940098286\n", - "Surface training t=17760, loss=0.02095501683652401\n", - "Surface training t=17761, loss=0.022261444479227066\n", - "Surface training t=17762, loss=0.031366665847599506\n", - "Surface training t=17763, loss=0.03117667604237795\n", - "Surface training t=17764, loss=0.026165545918047428\n", - "Surface training t=17765, loss=0.0299757095053792\n", - "Surface training t=17766, loss=0.02789277397096157\n", - "Surface training t=17767, loss=0.026251248084008694\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=17768, loss=0.028248817659914494\n", - "Surface training t=17769, loss=0.021743816323578358\n", - "Surface training t=17770, loss=0.027908464893698692\n", - "Surface training t=17771, loss=0.019529948011040688\n", - "Surface training t=17772, loss=0.027213802561163902\n", - "Surface training t=17773, loss=0.02108877431601286\n", - "Surface training t=17774, loss=0.02040100283920765\n", - "Surface training t=17775, loss=0.02895673830062151\n", - "Surface training t=17776, loss=0.03615056350827217\n", - "Surface training t=17777, loss=0.0299901831895113\n", - "Surface training t=17778, loss=0.021968121640384197\n", - "Surface training t=17779, loss=0.022568381391465664\n", - "Surface training t=17780, loss=0.019900239072740078\n", - "Surface training t=17781, loss=0.021449970081448555\n", - "Surface training t=17782, loss=0.028934646397829056\n", - "Surface training t=17783, loss=0.025254049338400364\n", - "Surface training t=17784, loss=0.02491874247789383\n", - "Surface training t=17785, loss=0.03180280700325966\n", - "Surface training t=17786, loss=0.03231982234865427\n", - "Surface training t=17787, loss=0.030084618367254734\n", - "Surface training t=17788, loss=0.025596588850021362\n", - "Surface training t=17789, loss=0.02725768554955721\n", - "Surface training t=17790, loss=0.02421305514872074\n", - "Surface training t=17791, loss=0.024892060086131096\n", - "Surface training t=17792, loss=0.025528867729008198\n", - "Surface training t=17793, loss=0.024992002174258232\n", - "Surface training t=17794, loss=0.026928987354040146\n", - "Surface training t=17795, loss=0.022275525145232677\n", - "Surface training t=17796, loss=0.023716717027127743\n", - "Surface training t=17797, loss=0.02475676778703928\n", - "Surface training t=17798, loss=0.019272944889962673\n", - "Surface training t=17799, loss=0.027737009339034557\n", - "Surface training t=17800, loss=0.030777888372540474\n", - "Surface training t=17801, loss=0.05813452787697315\n", - "Surface training t=17802, loss=0.04203322436660528\n", - "Surface training t=17803, loss=0.04748054500669241\n", - "Surface training t=17804, loss=0.05431228317320347\n", - "Surface training t=17805, loss=0.04421943612396717\n", - "Surface training t=17806, loss=0.040792882442474365\n", - "Surface training t=17807, loss=0.058487094938755035\n", - "Surface training t=17808, loss=0.04850354231894016\n", - "Surface training t=17809, loss=0.03955302946269512\n", - "Surface training t=17810, loss=0.03656096197664738\n", - "Surface training t=17811, loss=0.052983902394771576\n", - "Surface training t=17812, loss=0.029888607561588287\n", - "Surface training t=17813, loss=0.02850282471626997\n", - "Surface training t=17814, loss=0.040367985144257545\n", - "Surface training t=17815, 
loss=0.049097223207354546\n", - "Surface training t=17816, loss=0.06283377669751644\n", - "Surface training t=17817, loss=0.043246316723525524\n", - "Surface training t=17818, loss=0.0619044154882431\n", - "Surface training t=17819, loss=0.06902073137462139\n", - "Surface training t=17820, loss=0.04639125056564808\n", - "Surface training t=17821, loss=0.05726386979222298\n", - "Surface training t=17822, loss=0.04029383137822151\n", - "Surface training t=17823, loss=0.032494429498910904\n", - "Surface training t=17824, loss=0.042814772576093674\n", - "Surface training t=17825, loss=0.036403427831828594\n", - "Surface training t=17826, loss=0.035680899396538734\n", - "Surface training t=17827, loss=0.03962968848645687\n", - "Surface training t=17828, loss=0.033831361681222916\n", - "Surface training t=17829, loss=0.03488772641867399\n", - "Surface training t=17830, loss=0.03554348088800907\n", - "Surface training t=17831, loss=0.0372008141130209\n", - "Surface training t=17832, loss=0.03948610369116068\n", - "Surface training t=17833, loss=0.02964515145868063\n", - "Surface training t=17834, loss=0.03165290132164955\n", - "Surface training t=17835, loss=0.02866766508668661\n", - "Surface training t=17836, loss=0.028297943994402885\n", - "Surface training t=17837, loss=0.04536001197993755\n", - "Surface training t=17838, loss=0.023334010504186153\n", - "Surface training t=17839, loss=0.027265489101409912\n", - "Surface training t=17840, loss=0.03141337167471647\n", - "Surface training t=17841, loss=0.02732884045690298\n", - "Surface training t=17842, loss=0.019451663829386234\n", - "Surface training t=17843, loss=0.02875613421201706\n", - "Surface training t=17844, loss=0.023008487187325954\n", - "Surface training t=17845, loss=0.03434720262885094\n", - "Surface training t=17846, loss=0.02722785808146\n", - "Surface training t=17847, loss=0.024411375634372234\n", - "Surface training t=17848, loss=0.03204190079122782\n", - "Surface training t=17849, loss=0.03235703334212303\n", - "Surface training t=17850, loss=0.02824560645967722\n", - "Surface training t=17851, loss=0.022696349769830704\n", - "Surface training t=17852, loss=0.0388849712908268\n", - "Surface training t=17853, loss=0.028597813099622726\n", - "Surface training t=17854, loss=0.023103726096451283\n", - "Surface training t=17855, loss=0.03624747321009636\n", - "Surface training t=17856, loss=0.028940636664628983\n", - "Surface training t=17857, loss=0.0306516382843256\n", - "Surface training t=17858, loss=0.04388798028230667\n", - "Surface training t=17859, loss=0.0323566785082221\n", - "Surface training t=17860, loss=0.03330915793776512\n", - "Surface training t=17861, loss=0.03656549192965031\n", - "Surface training t=17862, loss=0.03232339024543762\n", - "Surface training t=17863, loss=0.06332739070057869\n", - "Surface training t=17864, loss=0.048918768763542175\n", - "Surface training t=17865, loss=0.053193556144833565\n", - "Surface training t=17866, loss=0.0639621801674366\n", - "Surface training t=17867, loss=0.04738760832697153\n", - "Surface training t=17868, loss=0.06623800098896027\n", - "Surface training t=17869, loss=0.05561392568051815\n", - "Surface training t=17870, loss=0.03786285035312176\n", - "Surface training t=17871, loss=0.03493858501315117\n", - "Surface training t=17872, loss=0.05440016835927963\n", - "Surface training t=17873, loss=0.03892962448298931\n", - "Surface training t=17874, loss=0.03918742947280407\n", - "Surface training t=17875, loss=0.036437079310417175\n", - "Surface training t=17876, 
loss=0.03454060200601816\n", - "Surface training t=17877, loss=0.02438165806233883\n", - "Surface training t=17878, loss=0.03897893987596035\n", - "Surface training t=17879, loss=0.026369398459792137\n", - "Surface training t=17880, loss=0.029417000710964203\n", - "Surface training t=17881, loss=0.033766936510801315\n", - "Surface training t=17882, loss=0.03034079074859619\n", - "Surface training t=17883, loss=0.03721642307937145\n", - "Surface training t=17884, loss=0.023467705585062504\n", - "Surface training t=17885, loss=0.03022089321166277\n", - "Surface training t=17886, loss=0.03226070664823055\n", - "Surface training t=17887, loss=0.029836060479283333\n", - "Surface training t=17888, loss=0.033236680552363396\n", - "Surface training t=17889, loss=0.030564159154891968\n", - "Surface training t=17890, loss=0.02556326612830162\n", - "Surface training t=17891, loss=0.026120699010789394\n", - "Surface training t=17892, loss=0.026853195391595364\n", - "Surface training t=17893, loss=0.027485807426273823\n", - "Surface training t=17894, loss=0.02174867782741785\n", - "Surface training t=17895, loss=0.02315988764166832\n", - "Surface training t=17896, loss=0.02650373335927725\n", - "Surface training t=17897, loss=0.024947986006736755\n", - "Surface training t=17898, loss=0.023701999336481094\n", - "Surface training t=17899, loss=0.02809617668390274\n", - "Surface training t=17900, loss=0.02845568861812353\n", - "Surface training t=17901, loss=0.029718289151787758\n", - "Surface training t=17902, loss=0.025448927655816078\n", - "Surface training t=17903, loss=0.02602567244321108\n", - "Surface training t=17904, loss=0.039237501099705696\n", - "Surface training t=17905, loss=0.03399512730538845\n", - "Surface training t=17906, loss=0.026670705527067184\n", - "Surface training t=17907, loss=0.025237534195184708\n", - "Surface training t=17908, loss=0.023416370153427124\n", - "Surface training t=17909, loss=0.022011134773492813\n", - "Surface training t=17910, loss=0.023835083469748497\n", - "Surface training t=17911, loss=0.02503287047147751\n", - "Surface training t=17912, loss=0.01819029077887535\n", - "Surface training t=17913, loss=0.01587840262800455\n", - "Surface training t=17914, loss=0.01469701761379838\n", - "Surface training t=17915, loss=0.021831488236784935\n", - "Surface training t=17916, loss=0.027196519076824188\n", - "Surface training t=17917, loss=0.024103544652462006\n", - "Surface training t=17918, loss=0.025577218271791935\n", - "Surface training t=17919, loss=0.022875898517668247\n", - "Surface training t=17920, loss=0.020158737897872925\n", - "Surface training t=17921, loss=0.02042584214359522\n", - "Surface training t=17922, loss=0.022109901532530785\n", - "Surface training t=17923, loss=0.018247220665216446\n", - "Surface training t=17924, loss=0.02625998854637146\n", - "Surface training t=17925, loss=0.02578243613243103\n", - "Surface training t=17926, loss=0.03104033600538969\n", - "Surface training t=17927, loss=0.029652445577085018\n", - "Surface training t=17928, loss=0.02563496772199869\n", - "Surface training t=17929, loss=0.026352793909609318\n", - "Surface training t=17930, loss=0.02643794845789671\n", - "Surface training t=17931, loss=0.027184821665287018\n", - "Surface training t=17932, loss=0.0264376662671566\n", - "Surface training t=17933, loss=0.03044825792312622\n", - "Surface training t=17934, loss=0.02801379654556513\n", - "Surface training t=17935, loss=0.02748573198914528\n", - "Surface training t=17936, loss=0.03476467914879322\n", - "Surface 
training t=17937, loss=0.027200723066926003\n", - "Surface training t=17938, loss=0.03055497631430626\n", - "Surface training t=17939, loss=0.050029342994093895\n", - "Surface training t=17940, loss=0.038702432066202164\n", - "Surface training t=17941, loss=0.027218366973102093\n", - "Surface training t=17942, loss=0.027480307035148144\n", - "Surface training t=17943, loss=0.023476825561374426\n", - "Surface training t=17944, loss=0.02906135655939579\n", - "Surface training t=17945, loss=0.026317426934838295\n", - "Surface training t=17946, loss=0.02850835770368576\n", - "Surface training t=17947, loss=0.02266205009073019\n", - "Surface training t=17948, loss=0.028324109502136707\n", - "Surface training t=17949, loss=0.044641006737947464\n", - "Surface training t=17950, loss=0.03698128089308739\n", - "Surface training t=17951, loss=0.03726501949131489\n", - "Surface training t=17952, loss=0.03560264594852924\n", - "Surface training t=17953, loss=0.026044785045087337\n", - "Surface training t=17954, loss=0.026331686414778233\n", - "Surface training t=17955, loss=0.03677871264517307\n", - "Surface training t=17956, loss=0.0301291448995471\n", - "Surface training t=17957, loss=0.030676023103296757\n", - "Surface training t=17958, loss=0.0506410151720047\n", - "Surface training t=17959, loss=0.028936798684298992\n", - "Surface training t=17960, loss=0.04662579298019409\n", - "Surface training t=17961, loss=0.03393208608031273\n", - "Surface training t=17962, loss=0.044426921755075455\n", - "Surface training t=17963, loss=0.029530711472034454\n", - "Surface training t=17964, loss=0.02775432448834181\n", - "Surface training t=17965, loss=0.031946687027812004\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=17966, loss=0.037903361953794956\n", - "Surface training t=17967, loss=0.02964609768241644\n", - "Surface training t=17968, loss=0.029368171468377113\n", - "Surface training t=17969, loss=0.03515288420021534\n", - "Surface training t=17970, loss=0.025575612671673298\n", - "Surface training t=17971, loss=0.03275744989514351\n", - "Surface training t=17972, loss=0.030722894705832005\n", - "Surface training t=17973, loss=0.02333466336131096\n", - "Surface training t=17974, loss=0.03740040957927704\n", - "Surface training t=17975, loss=0.037162731401622295\n", - "Surface training t=17976, loss=0.036664512008428574\n", - "Surface training t=17977, loss=0.030927698127925396\n", - "Surface training t=17978, loss=0.0277615524828434\n", - "Surface training t=17979, loss=0.036489889957010746\n", - "Surface training t=17980, loss=0.05584465153515339\n", - "Surface training t=17981, loss=0.03223780356347561\n", - "Surface training t=17982, loss=0.03786332160234451\n", - "Surface training t=17983, loss=0.06433053314685822\n", - "Surface training t=17984, loss=0.04068547394126654\n", - "Surface training t=17985, loss=0.04745004419237375\n", - "Surface training t=17986, loss=0.07073527202010155\n", - "Surface training t=17987, loss=0.050762370228767395\n", - "Surface training t=17988, loss=0.04066086560487747\n", - "Surface training t=17989, loss=0.05611717887222767\n", - "Surface training t=17990, loss=0.03784526325762272\n", - "Surface training t=17991, loss=0.03563736379146576\n", - "Surface training t=17992, loss=0.024563302285969257\n", - "Surface training t=17993, loss=0.02338550705462694\n", - "Surface training t=17994, loss=0.022443898022174835\n", - "Surface training t=17995, loss=0.02523454651236534\n", - "Surface training t=17996, 
loss=0.017277492210268974\n", - "Surface training t=17997, loss=0.029210451059043407\n", - "Surface training t=17998, loss=0.037940409034490585\n", - "Surface training t=17999, loss=0.027523049153387547\n", - "Surface training t=18000, loss=0.028114658780395985\n", - "Surface training t=18001, loss=0.026966584846377373\n", - "Surface training t=18002, loss=0.02430914994329214\n", - "Surface training t=18003, loss=0.020764497108757496\n", - "Surface training t=18004, loss=0.02265132963657379\n", - "Surface training t=18005, loss=0.030728894285857677\n", - "Surface training t=18006, loss=0.022790223360061646\n", - "Surface training t=18007, loss=0.017420602031052113\n", - "Surface training t=18008, loss=0.019421330653131008\n", - "Surface training t=18009, loss=0.024856338277459145\n", - "Surface training t=18010, loss=0.02618524618446827\n", - "Surface training t=18011, loss=0.020855328999459743\n", - "Surface training t=18012, loss=0.02655002661049366\n", - "Surface training t=18013, loss=0.0289041455835104\n", - "Surface training t=18014, loss=0.0501827597618103\n", - "Surface training t=18015, loss=0.03519007097929716\n", - "Surface training t=18016, loss=0.0382359866052866\n", - "Surface training t=18017, loss=0.029769178479909897\n", - "Surface training t=18018, loss=0.022110670804977417\n", - "Surface training t=18019, loss=0.024619010742753744\n", - "Surface training t=18020, loss=0.04268037900328636\n", - "Surface training t=18021, loss=0.03708009421825409\n", - "Surface training t=18022, loss=0.024282910861074924\n", - "Surface training t=18023, loss=0.023949154652655125\n", - "Surface training t=18024, loss=0.025203119032084942\n", - "Surface training t=18025, loss=0.026100369170308113\n", - "Surface training t=18026, loss=0.016279933974146843\n", - "Surface training t=18027, loss=0.02697630226612091\n", - "Surface training t=18028, loss=0.02350408025085926\n", - "Surface training t=18029, loss=0.020522860810160637\n", - "Surface training t=18030, loss=0.028006592765450478\n", - "Surface training t=18031, loss=0.021608326584100723\n", - "Surface training t=18032, loss=0.03051812667399645\n", - "Surface training t=18033, loss=0.03800046443939209\n", - "Surface training t=18034, loss=0.031153101474046707\n", - "Surface training t=18035, loss=0.03953980840742588\n", - "Surface training t=18036, loss=0.03322505485266447\n", - "Surface training t=18037, loss=0.04402460716664791\n", - "Surface training t=18038, loss=0.04266052879393101\n", - "Surface training t=18039, loss=0.050993598997592926\n", - "Surface training t=18040, loss=0.04240964353084564\n", - "Surface training t=18041, loss=0.06308578699827194\n", - "Surface training t=18042, loss=0.03776656370609999\n", - "Surface training t=18043, loss=0.05978359840810299\n", - "Surface training t=18044, loss=0.04124217480421066\n", - "Surface training t=18045, loss=0.06313358806073666\n", - "Surface training t=18046, loss=0.0351015068590641\n", - "Surface training t=18047, loss=0.05254237726330757\n", - "Surface training t=18048, loss=0.03546858951449394\n", - "Surface training t=18049, loss=0.032643530517816544\n", - "Surface training t=18050, loss=0.0362392570823431\n", - "Surface training t=18051, loss=0.02695672120898962\n", - "Surface training t=18052, loss=0.03012349270284176\n", - "Surface training t=18053, loss=0.029030563309788704\n", - "Surface training t=18054, loss=0.025538154877722263\n", - "Surface training t=18055, loss=0.02430107071995735\n", - "Surface training t=18056, loss=0.02742779813706875\n", - "Surface training 
t=18057, loss=0.0293081346899271\n", - "Surface training t=18058, loss=0.03135046362876892\n", - "Surface training t=18059, loss=0.027042748406529427\n", - "Surface training t=18060, loss=0.030244121327996254\n", - "Surface training t=18061, loss=0.021171975880861282\n", - "Surface training t=18062, loss=0.03516763914376497\n", - "Surface training t=18063, loss=0.022069995291531086\n", - "Surface training t=18064, loss=0.019984466023743153\n", - "Surface training t=18065, loss=0.0309737641364336\n", - "Surface training t=18066, loss=0.02742875274270773\n", - "Surface training t=18067, loss=0.03186504915356636\n", - "Surface training t=18068, loss=0.0322917178273201\n", - "Surface training t=18069, loss=0.03151393588632345\n", - "Surface training t=18070, loss=0.029535308480262756\n", - "Surface training t=18071, loss=0.03437256067991257\n", - "Surface training t=18072, loss=0.028100788593292236\n", - "Surface training t=18073, loss=0.038303861394524574\n", - "Surface training t=18074, loss=0.04121713526546955\n", - "Surface training t=18075, loss=0.039477039128541946\n", - "Surface training t=18076, loss=0.034951248206198215\n", - "Surface training t=18077, loss=0.04275836609303951\n", - "Surface training t=18078, loss=0.04585766792297363\n", - "Surface training t=18079, loss=0.03898410499095917\n", - "Surface training t=18080, loss=0.031953239813447\n", - "Surface training t=18081, loss=0.02706417627632618\n", - "Surface training t=18082, loss=0.030592131428420544\n", - "Surface training t=18083, loss=0.031486536376178265\n", - "Surface training t=18084, loss=0.03284759446978569\n", - "Surface training t=18085, loss=0.035426429472863674\n", - "Surface training t=18086, loss=0.037831418216228485\n", - "Surface training t=18087, loss=0.03880752809345722\n", - "Surface training t=18088, loss=0.027921481989324093\n", - "Surface training t=18089, loss=0.021276511251926422\n", - "Surface training t=18090, loss=0.03163769096136093\n", - "Surface training t=18091, loss=0.01869419915601611\n", - "Surface training t=18092, loss=0.025506709702312946\n", - "Surface training t=18093, loss=0.029045388102531433\n", - "Surface training t=18094, loss=0.028430012986063957\n", - "Surface training t=18095, loss=0.03209620341658592\n", - "Surface training t=18096, loss=0.027241968549787998\n", - "Surface training t=18097, loss=0.02568646753206849\n", - "Surface training t=18098, loss=0.036317262798547745\n", - "Surface training t=18099, loss=0.02371145784854889\n", - "Surface training t=18100, loss=0.024983879178762436\n", - "Surface training t=18101, loss=0.028831559233367443\n", - "Surface training t=18102, loss=0.022707059979438782\n", - "Surface training t=18103, loss=0.027168930508196354\n", - "Surface training t=18104, loss=0.02808204386383295\n", - "Surface training t=18105, loss=0.0232708603143692\n", - "Surface training t=18106, loss=0.02397774439305067\n", - "Surface training t=18107, loss=0.02679489180445671\n", - "Surface training t=18108, loss=0.023902573622763157\n", - "Surface training t=18109, loss=0.02449156530201435\n", - "Surface training t=18110, loss=0.02138742245733738\n", - "Surface training t=18111, loss=0.022423764690756798\n", - "Surface training t=18112, loss=0.027371468022465706\n", - "Surface training t=18113, loss=0.048440221697092056\n", - "Surface training t=18114, loss=0.04110567271709442\n", - "Surface training t=18115, loss=0.04076877236366272\n", - "Surface training t=18116, loss=0.042261785827577114\n", - "Surface training t=18117, loss=0.05359257012605667\n", - "Surface 
training t=18118, loss=0.03701692633330822\n", - "Surface training t=18119, loss=0.030177478678524494\n", - "Surface training t=18120, loss=0.03221895918250084\n", - "Surface training t=18121, loss=0.027434222400188446\n", - "Surface training t=18122, loss=0.02500599715858698\n", - "Surface training t=18123, loss=0.02442264650017023\n", - "Surface training t=18124, loss=0.020164810121059418\n", - "Surface training t=18125, loss=0.024249390698969364\n", - "Surface training t=18126, loss=0.02702342066913843\n", - "Surface training t=18127, loss=0.02379583567380905\n", - "Surface training t=18128, loss=0.03272007219493389\n", - "Surface training t=18129, loss=0.027071957476437092\n", - "Surface training t=18130, loss=0.022160744294524193\n", - "Surface training t=18131, loss=0.021936064586043358\n", - "Surface training t=18132, loss=0.023906310088932514\n", - "Surface training t=18133, loss=0.024449662305414677\n", - "Surface training t=18134, loss=0.02073570527136326\n", - "Surface training t=18135, loss=0.03396477736532688\n", - "Surface training t=18136, loss=0.0314508443698287\n", - "Surface training t=18137, loss=0.02684288751333952\n", - "Surface training t=18138, loss=0.023275715298950672\n", - "Surface training t=18139, loss=0.02456589788198471\n", - "Surface training t=18140, loss=0.024739598855376244\n", - "Surface training t=18141, loss=0.028992008417844772\n", - "Surface training t=18142, loss=0.024001735262572765\n", - "Surface training t=18143, loss=0.019855891354382038\n", - "Surface training t=18144, loss=0.024664527736604214\n", - "Surface training t=18145, loss=0.021919923834502697\n", - "Surface training t=18146, loss=0.02970554679632187\n", - "Surface training t=18147, loss=0.020591710694134235\n", - "Surface training t=18148, loss=0.022646416909992695\n", - "Surface training t=18149, loss=0.025329903699457645\n", - "Surface training t=18150, loss=0.024315398186445236\n", - "Surface training t=18151, loss=0.02482045814394951\n", - "Surface training t=18152, loss=0.024820080026984215\n", - "Surface training t=18153, loss=0.019724013283848763\n", - "Surface training t=18154, loss=0.02918710559606552\n", - "Surface training t=18155, loss=0.029781104065477848\n", - "Surface training t=18156, loss=0.022322550415992737\n", - "Surface training t=18157, loss=0.02373044192790985\n", - "Surface training t=18158, loss=0.029990151524543762\n", - "Surface training t=18159, loss=0.024073628708720207\n", - "Surface training t=18160, loss=0.027485277503728867\n", - "Surface training t=18161, loss=0.032859452068805695\n", - "Surface training t=18162, loss=0.018940279260277748\n", - "Surface training t=18163, loss=0.02348163165152073\n", - "Surface training t=18164, loss=0.029491914436221123\n", - "Surface training t=18165, loss=0.02950236015021801\n", - "Surface training t=18166, loss=0.031514834612607956\n", - "Surface training t=18167, loss=0.03145487979054451\n", - "Surface training t=18168, loss=0.034922205843031406\n", - "Surface training t=18169, loss=0.030261196196079254\n", - "Surface training t=18170, loss=0.039368148893117905\n", - "Surface training t=18171, loss=0.05386165715754032\n", - "Surface training t=18172, loss=0.043772172182798386\n", - "Surface training t=18173, loss=0.04409675486385822\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=18174, loss=0.062160808593034744\n", - "Surface training t=18175, loss=0.04066605865955353\n", - "Surface training t=18176, loss=0.04708436504006386\n", - "Surface training t=18177, 
loss=0.07333500683307648\n", - "Surface training t=18178, loss=0.04807300865650177\n", - "Surface training t=18179, loss=0.06718670204281807\n", - "Surface training t=18180, loss=0.057420914992690086\n", - "Surface training t=18181, loss=0.042869674041867256\n", - "Surface training t=18182, loss=0.05236964114010334\n", - "Surface training t=18183, loss=0.04377138987183571\n", - "Surface training t=18184, loss=0.033941312693059444\n", - "Surface training t=18185, loss=0.03709413390606642\n", - "Surface training t=18186, loss=0.030761657282710075\n", - "Surface training t=18187, loss=0.04120021127164364\n", - "Surface training t=18188, loss=0.03851923253387213\n", - "Surface training t=18189, loss=0.02996982168406248\n", - "Surface training t=18190, loss=0.05568835698068142\n", - "Surface training t=18191, loss=0.0611927080899477\n", - "Surface training t=18192, loss=0.04119708016514778\n", - "Surface training t=18193, loss=0.04919114708900452\n", - "Surface training t=18194, loss=0.03419191762804985\n", - "Surface training t=18195, loss=0.033337485045194626\n", - "Surface training t=18196, loss=0.02946324273943901\n", - "Surface training t=18197, loss=0.022061594761908054\n", - "Surface training t=18198, loss=0.024163429625332355\n", - "Surface training t=18199, loss=0.024423004128038883\n", - "Surface training t=18200, loss=0.020508707500994205\n", - "Surface training t=18201, loss=0.01722982805222273\n", - "Surface training t=18202, loss=0.015759858768433332\n", - "Surface training t=18203, loss=0.017889101058244705\n", - "Surface training t=18204, loss=0.0243051890283823\n", - "Surface training t=18205, loss=0.028118858113884926\n", - "Surface training t=18206, loss=0.025264764204621315\n", - "Surface training t=18207, loss=0.02664338145405054\n", - "Surface training t=18208, loss=0.044540053233504295\n", - "Surface training t=18209, loss=0.030007985420525074\n", - "Surface training t=18210, loss=0.0449665654450655\n", - "Surface training t=18211, loss=0.03885788004845381\n", - "Surface training t=18212, loss=0.05542089603841305\n", - "Surface training t=18213, loss=0.034995125606656075\n", - "Surface training t=18214, loss=0.045516710728406906\n", - "Surface training t=18215, loss=0.04831496439874172\n", - "Surface training t=18216, loss=0.042774369940161705\n", - "Surface training t=18217, loss=0.03473977278918028\n", - "Surface training t=18218, loss=0.033152601681649685\n", - "Surface training t=18219, loss=0.024315258488059044\n", - "Surface training t=18220, loss=0.0293319346383214\n", - "Surface training t=18221, loss=0.02871120534837246\n", - "Surface training t=18222, loss=0.025994270108640194\n", - "Surface training t=18223, loss=0.017391699831932783\n", - "Surface training t=18224, loss=0.02424163930118084\n", - "Surface training t=18225, loss=0.02339857630431652\n", - "Surface training t=18226, loss=0.022161215543746948\n", - "Surface training t=18227, loss=0.02260524034500122\n", - "Surface training t=18228, loss=0.01873728260397911\n", - "Surface training t=18229, loss=0.02806886751204729\n", - "Surface training t=18230, loss=0.018529538996517658\n", - "Surface training t=18231, loss=0.023399430327117443\n", - "Surface training t=18232, loss=0.019476591609418392\n", - "Surface training t=18233, loss=0.024820171296596527\n", - "Surface training t=18234, loss=0.03274345677345991\n", - "Surface training t=18235, loss=0.026013514958322048\n", - "Surface training t=18236, loss=0.02175194025039673\n", - "Surface training t=18237, loss=0.024721545167267323\n", - "Surface training 
t=18238, loss=0.029974515549838543\n", - "Surface training t=18239, loss=0.025089633651077747\n", - "Surface training t=18240, loss=0.02036797534674406\n", - "Surface training t=18241, loss=0.02462714072316885\n", - "Surface training t=18242, loss=0.02151736244559288\n", - "Surface training t=18243, loss=0.03453364968299866\n", - "Surface training t=18244, loss=0.028486053459346294\n", - "Surface training t=18245, loss=0.026252547279000282\n", - "Surface training t=18246, loss=0.03675534389913082\n", - "Surface training t=18247, loss=0.029119259677827358\n", - "Surface training t=18248, loss=0.056822769343853\n", - "Surface training t=18249, loss=0.036504896357655525\n", - "Surface training t=18250, loss=0.04993618279695511\n", - "Surface training t=18251, loss=0.02651187777519226\n", - "Surface training t=18252, loss=0.037272870540618896\n", - "Surface training t=18253, loss=0.023008735850453377\n", - "Surface training t=18254, loss=0.028286460787057877\n", - "Surface training t=18255, loss=0.021945565938949585\n", - "Surface training t=18256, loss=0.024929724633693695\n", - "Surface training t=18257, loss=0.02638804353773594\n", - "Surface training t=18258, loss=0.029981344006955624\n", - "Surface training t=18259, loss=0.02972597163170576\n", - "Surface training t=18260, loss=0.029245968908071518\n", - "Surface training t=18261, loss=0.023549162782728672\n", - "Surface training t=18262, loss=0.02075570821762085\n", - "Surface training t=18263, loss=0.022442622110247612\n", - "Surface training t=18264, loss=0.02408040501177311\n", - "Surface training t=18265, loss=0.02725983876734972\n", - "Surface training t=18266, loss=0.02679560799151659\n", - "Surface training t=18267, loss=0.0243526604026556\n", - "Surface training t=18268, loss=0.0269414521753788\n", - "Surface training t=18269, loss=0.03206673730164766\n", - "Surface training t=18270, loss=0.026094593107700348\n", - "Surface training t=18271, loss=0.020633134059607983\n", - "Surface training t=18272, loss=0.01641716994345188\n", - "Surface training t=18273, loss=0.02061273669824004\n", - "Surface training t=18274, loss=0.02378447912633419\n", - "Surface training t=18275, loss=0.022665218450129032\n", - "Surface training t=18276, loss=0.02898708824068308\n", - "Surface training t=18277, loss=0.030555238015949726\n", - "Surface training t=18278, loss=0.029431059956550598\n", - "Surface training t=18279, loss=0.03574497438967228\n", - "Surface training t=18280, loss=0.04119456186890602\n", - "Surface training t=18281, loss=0.031845541670918465\n", - "Surface training t=18282, loss=0.03449724428355694\n", - "Surface training t=18283, loss=0.02601052261888981\n", - "Surface training t=18284, loss=0.025927905924618244\n", - "Surface training t=18285, loss=0.021202232223004103\n", - "Surface training t=18286, loss=0.03441699966788292\n", - "Surface training t=18287, loss=0.027939417399466038\n", - "Surface training t=18288, loss=0.02528529055416584\n", - "Surface training t=18289, loss=0.02223123051226139\n", - "Surface training t=18290, loss=0.02585942205041647\n", - "Surface training t=18291, loss=0.025106502696871758\n", - "Surface training t=18292, loss=0.021117773838341236\n", - "Surface training t=18293, loss=0.021846404299139977\n", - "Surface training t=18294, loss=0.023655250668525696\n", - "Surface training t=18295, loss=0.019908953458070755\n", - "Surface training t=18296, loss=0.02266254834830761\n", - "Surface training t=18297, loss=0.018895622342824936\n", - "Surface training t=18298, loss=0.019157310016453266\n", - 
"Surface training t=18299, loss=0.02418000902980566\n", - "Surface training t=18300, loss=0.02783010248094797\n", - "Surface training t=18301, loss=0.027544904500246048\n", - "Surface training t=18302, loss=0.02545651327818632\n", - "Surface training t=18303, loss=0.02868082281202078\n", - "Surface training t=18304, loss=0.028846907429397106\n", - "Surface training t=18305, loss=0.024726164527237415\n", - "Surface training t=18306, loss=0.025336842983961105\n", - "Surface training t=18307, loss=0.020265724509954453\n", - "Surface training t=18308, loss=0.017343408428132534\n", - "Surface training t=18309, loss=0.021138882264494896\n", - "Surface training t=18310, loss=0.022956675849854946\n", - "Surface training t=18311, loss=0.019530667923390865\n", - "Surface training t=18312, loss=0.04063816927373409\n", - "Surface training t=18313, loss=0.03003042284399271\n", - "Surface training t=18314, loss=0.02992120571434498\n", - "Surface training t=18315, loss=0.01731855794787407\n", - "Surface training t=18316, loss=0.023824007250368595\n", - "Surface training t=18317, loss=0.02050016913563013\n", - "Surface training t=18318, loss=0.029667217284440994\n", - "Surface training t=18319, loss=0.020182565785944462\n", - "Surface training t=18320, loss=0.027305408380925655\n", - "Surface training t=18321, loss=0.027041321620345116\n", - "Surface training t=18322, loss=0.028231922537088394\n", - "Surface training t=18323, loss=0.02951520960777998\n", - "Surface training t=18324, loss=0.022759780287742615\n", - "Surface training t=18325, loss=0.028607694432139397\n", - "Surface training t=18326, loss=0.023136483039706945\n", - "Surface training t=18327, loss=0.02059780340641737\n", - "Surface training t=18328, loss=0.02986151911318302\n", - "Surface training t=18329, loss=0.022891209460794926\n", - "Surface training t=18330, loss=0.019905694760382175\n", - "Surface training t=18331, loss=0.017779388464987278\n", - "Surface training t=18332, loss=0.019928774796426296\n", - "Surface training t=18333, loss=0.01763929147273302\n", - "Surface training t=18334, loss=0.01833837479352951\n", - "Surface training t=18335, loss=0.025946238078176975\n", - "Surface training t=18336, loss=0.030638396739959717\n", - "Surface training t=18337, loss=0.037979092448949814\n", - "Surface training t=18338, loss=0.02772899717092514\n", - "Surface training t=18339, loss=0.034315651282668114\n", - "Surface training t=18340, loss=0.0328924935311079\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=18341, loss=0.024600500240921974\n", - "Surface training t=18342, loss=0.025121472775936127\n", - "Surface training t=18343, loss=0.030995416454970837\n", - "Surface training t=18344, loss=0.03255679830908775\n", - "Surface training t=18345, loss=0.03082861937582493\n", - "Surface training t=18346, loss=0.024720555171370506\n", - "Surface training t=18347, loss=0.026947983540594578\n", - "Surface training t=18348, loss=0.020093580707907677\n", - "Surface training t=18349, loss=0.025640763342380524\n", - "Surface training t=18350, loss=0.02505845297127962\n", - "Surface training t=18351, loss=0.028290786780416965\n", - "Surface training t=18352, loss=0.027735530398786068\n", - "Surface training t=18353, loss=0.029311434365808964\n", - "Surface training t=18354, loss=0.028070508502423763\n", - "Surface training t=18355, loss=0.01956935692578554\n", - "Surface training t=18356, loss=0.019121830817312002\n", - "Surface training t=18357, loss=0.023871883749961853\n", - "Surface training 
t=18358, loss=0.04226556420326233\n", - "Surface training t=18359, loss=0.04345606081187725\n", - "Surface training t=18360, loss=0.025216604582965374\n", - "Surface training t=18361, loss=0.021093467250466347\n", - "Surface training t=18362, loss=0.03816403076052666\n", - "Surface training t=18363, loss=0.04297618940472603\n", - "Surface training t=18364, loss=0.032757566310465336\n", - "Surface training t=18365, loss=0.042384300380945206\n", - "Surface training t=18366, loss=0.03634089231491089\n", - "Surface training t=18367, loss=0.03809553198516369\n", - "Surface training t=18368, loss=0.04890288971364498\n", - "Surface training t=18369, loss=0.03857133351266384\n", - "Surface training t=18370, loss=0.04815148003399372\n", - "Surface training t=18371, loss=0.04109025001525879\n", - "Surface training t=18372, loss=0.048971595242619514\n", - "Surface training t=18373, loss=0.04486672207713127\n", - "Surface training t=18374, loss=0.045394591987133026\n", - "Surface training t=18375, loss=0.033068316988646984\n", - "Surface training t=18376, loss=0.035339782014489174\n", - "Surface training t=18377, loss=0.024017926305532455\n", - "Surface training t=18378, loss=0.030710852704942226\n", - "Surface training t=18379, loss=0.030751433223485947\n", - "Surface training t=18380, loss=0.025278053246438503\n", - "Surface training t=18381, loss=0.028423073701560497\n", - "Surface training t=18382, loss=0.029204637743532658\n", - "Surface training t=18383, loss=0.028566574677824974\n", - "Surface training t=18384, loss=0.03974452055990696\n", - "Surface training t=18385, loss=0.03992028348147869\n", - "Surface training t=18386, loss=0.030072699300944805\n", - "Surface training t=18387, loss=0.032301148399710655\n", - "Surface training t=18388, loss=0.035325199365615845\n", - "Surface training t=18389, loss=0.04899907298386097\n", - "Surface training t=18390, loss=0.037285326048731804\n", - "Surface training t=18391, loss=0.026776570826768875\n", - "Surface training t=18392, loss=0.0291263647377491\n", - "Surface training t=18393, loss=0.031140493229031563\n", - "Surface training t=18394, loss=0.024764331057667732\n", - "Surface training t=18395, loss=0.027282300405204296\n", - "Surface training t=18396, loss=0.02803061716258526\n", - "Surface training t=18397, loss=0.02436337899416685\n", - "Surface training t=18398, loss=0.023536776192486286\n", - "Surface training t=18399, loss=0.020783510990440845\n", - "Surface training t=18400, loss=0.013768672943115234\n", - "Surface training t=18401, loss=0.022480282001197338\n", - "Surface training t=18402, loss=0.02173678856343031\n", - "Surface training t=18403, loss=0.017990015912801027\n", - "Surface training t=18404, loss=0.02120573353022337\n", - "Surface training t=18405, loss=0.022063057869672775\n", - "Surface training t=18406, loss=0.02437820378690958\n", - "Surface training t=18407, loss=0.023956427350640297\n", - "Surface training t=18408, loss=0.02472088113427162\n", - "Surface training t=18409, loss=0.029708381742239\n", - "Surface training t=18410, loss=0.02785038761794567\n", - "Surface training t=18411, loss=0.03313133213669062\n", - "Surface training t=18412, loss=0.034745652228593826\n", - "Surface training t=18413, loss=0.026890048757195473\n", - "Surface training t=18414, loss=0.03057989478111267\n", - "Surface training t=18415, loss=0.029295346699655056\n", - "Surface training t=18416, loss=0.04401092976331711\n", - "Surface training t=18417, loss=0.04756644181907177\n", - "Surface training t=18418, loss=0.0380328893661499\n", - 
"Surface training t=18419, loss=0.05976167321205139\n", - "Surface training t=18420, loss=0.0505708996206522\n", - "Surface training t=18421, loss=0.04091780260205269\n", - "Surface training t=18422, loss=0.028335344046354294\n", - "Surface training t=18423, loss=0.0389114823192358\n", - "Surface training t=18424, loss=0.028693833388388157\n", - "Surface training t=18425, loss=0.04053913801908493\n", - "Surface training t=18426, loss=0.03487199358642101\n", - "Surface training t=18427, loss=0.04560473375022411\n", - "Surface training t=18428, loss=0.03641730919480324\n", - "Surface training t=18429, loss=0.04698866792023182\n", - "Surface training t=18430, loss=0.037276034243404865\n", - "Surface training t=18431, loss=0.058399030938744545\n", - "Surface training t=18432, loss=0.03655400685966015\n", - "Surface training t=18433, loss=0.031462715938687325\n", - "Surface training t=18434, loss=0.02624756284058094\n", - "Surface training t=18435, loss=0.03651177976280451\n", - "Surface training t=18436, loss=0.028148232959210873\n", - "Surface training t=18437, loss=0.025857017375528812\n", - "Surface training t=18438, loss=0.024639561772346497\n", - "Surface training t=18439, loss=0.023820560425519943\n", - "Surface training t=18440, loss=0.03353814594447613\n", - "Surface training t=18441, loss=0.03979232534766197\n", - "Surface training t=18442, loss=0.04093479923903942\n", - "Surface training t=18443, loss=0.03352319821715355\n", - "Surface training t=18444, loss=0.034058403223752975\n", - "Surface training t=18445, loss=0.04935048148036003\n", - "Surface training t=18446, loss=0.037556158378720284\n", - "Surface training t=18447, loss=0.03809955529868603\n", - "Surface training t=18448, loss=0.04861313849687576\n", - "Surface training t=18449, loss=0.05986964330077171\n", - "Surface training t=18450, loss=0.04273863509297371\n", - "Surface training t=18451, loss=0.04314294643700123\n", - "Surface training t=18452, loss=0.08294949866831303\n", - "Surface training t=18453, loss=0.047547440975904465\n", - "Surface training t=18454, loss=0.037908753380179405\n", - "Surface training t=18455, loss=0.037552230060100555\n", - "Surface training t=18456, loss=0.04738234356045723\n", - "Surface training t=18457, loss=0.039432477205991745\n", - "Surface training t=18458, loss=0.052370110526680946\n", - "Surface training t=18459, loss=0.04278913885354996\n", - "Surface training t=18460, loss=0.03478042408823967\n", - "Surface training t=18461, loss=0.03543051518499851\n", - "Surface training t=18462, loss=0.02617967827245593\n", - "Surface training t=18463, loss=0.0377980787307024\n", - "Surface training t=18464, loss=0.029010397382080555\n", - "Surface training t=18465, loss=0.03736218996345997\n", - "Surface training t=18466, loss=0.02957933582365513\n", - "Surface training t=18467, loss=0.027168055064976215\n", - "Surface training t=18468, loss=0.03159619867801666\n", - "Surface training t=18469, loss=0.031138278543949127\n", - "Surface training t=18470, loss=0.031919363886117935\n", - "Surface training t=18471, loss=0.037851533852517605\n", - "Surface training t=18472, loss=0.051274511963129044\n", - "Surface training t=18473, loss=0.0371808297932148\n", - "Surface training t=18474, loss=0.03745945729315281\n", - "Surface training t=18475, loss=0.028224140405654907\n", - "Surface training t=18476, loss=0.030688087455928326\n", - "Surface training t=18477, loss=0.0435821209102869\n", - "Surface training t=18478, loss=0.0316021665930748\n", - "Surface training t=18479, loss=0.019687194377183914\n", 
- "Surface training t=18480, loss=0.02748957183212042\n", - "Surface training t=18481, loss=0.032393477857112885\n", - "Surface training t=18482, loss=0.034747449681162834\n", - "Surface training t=18483, loss=0.025946239940822124\n", - "Surface training t=18484, loss=0.025525451637804508\n", - "Surface training t=18485, loss=0.027421499602496624\n", - "Surface training t=18486, loss=0.026810327544808388\n", - "Surface training t=18487, loss=0.02754884585738182\n", - "Surface training t=18488, loss=0.02918890491127968\n", - "Surface training t=18489, loss=0.026532883755862713\n", - "Surface training t=18490, loss=0.016931507736444473\n", - "Surface training t=18491, loss=0.025519177317619324\n", - "Surface training t=18492, loss=0.03244328033179045\n", - "Surface training t=18493, loss=0.023582860827445984\n", - "Surface training t=18494, loss=0.03246194776147604\n", - "Surface training t=18495, loss=0.0314780306071043\n", - "Surface training t=18496, loss=0.02993699163198471\n", - "Surface training t=18497, loss=0.03952104412019253\n", - "Surface training t=18498, loss=0.034083892591297626\n", - "Surface training t=18499, loss=0.030421167612075806\n", - "Surface training t=18500, loss=0.02841702103614807\n", - "Surface training t=18501, loss=0.02685073111206293\n", - "Surface training t=18502, loss=0.023380734957754612\n", - "Surface training t=18503, loss=0.022284272126853466\n", - "Surface training t=18504, loss=0.025873213075101376\n", - "Surface training t=18505, loss=0.03109641559422016\n", - "Surface training t=18506, loss=0.023979168385267258\n", - "Surface training t=18507, loss=0.02173769287765026\n", - "Surface training t=18508, loss=0.021354339085519314\n", - "Surface training t=18509, loss=0.02102862298488617\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=18510, loss=0.015847364906221628\n", - "Surface training t=18511, loss=0.017850205302238464\n", - "Surface training t=18512, loss=0.01714788656681776\n", - "Surface training t=18513, loss=0.018144166097044945\n", - "Surface training t=18514, loss=0.019437896087765694\n", - "Surface training t=18515, loss=0.03365481365472078\n", - "Surface training t=18516, loss=0.025523042306303978\n", - "Surface training t=18517, loss=0.02125887479633093\n", - "Surface training t=18518, loss=0.024969463236629963\n", - "Surface training t=18519, loss=0.029237879440188408\n", - "Surface training t=18520, loss=0.036710746586322784\n", - "Surface training t=18521, loss=0.031062012538313866\n", - "Surface training t=18522, loss=0.03281337022781372\n", - "Surface training t=18523, loss=0.028150811791419983\n", - "Surface training t=18524, loss=0.032516008242964745\n", - "Surface training t=18525, loss=0.027305521070957184\n", - "Surface training t=18526, loss=0.026687242090702057\n", - "Surface training t=18527, loss=0.02526533044874668\n", - "Surface training t=18528, loss=0.02999833971261978\n", - "Surface training t=18529, loss=0.03659799974411726\n", - "Surface training t=18530, loss=0.02995496615767479\n", - "Surface training t=18531, loss=0.04961103014647961\n", - "Surface training t=18532, loss=0.03805346693843603\n", - "Surface training t=18533, loss=0.06355192698538303\n", - "Surface training t=18534, loss=0.035579707473516464\n", - "Surface training t=18535, loss=0.03038726281374693\n", - "Surface training t=18536, loss=0.02895805798470974\n", - "Surface training t=18537, loss=0.028910853900015354\n", - "Surface training t=18538, loss=0.03151147998869419\n", - "Surface training t=18539, 
loss=0.028743465431034565\n", - "Surface training t=18540, loss=0.035273099318146706\n", - "Surface training t=18541, loss=0.03673746809363365\n", - "Surface training t=18542, loss=0.054621100425720215\n", - "Surface training t=18543, loss=0.05401116982102394\n", - "Surface training t=18544, loss=0.042640868574380875\n", - "Surface training t=18545, loss=0.04152565822005272\n", - "Surface training t=18546, loss=0.043508950620889664\n", - "Surface training t=18547, loss=0.029646960087120533\n", - "Surface training t=18548, loss=0.027537398040294647\n", - "Surface training t=18549, loss=0.033965473994612694\n", - "Surface training t=18550, loss=0.025351736694574356\n", - "Surface training t=18551, loss=0.023482020013034344\n", - "Surface training t=18552, loss=0.0217939130961895\n", - "Surface training t=18553, loss=0.04495716653764248\n", - "Surface training t=18554, loss=0.030701416544616222\n", - "Surface training t=18555, loss=0.027192319743335247\n", - "Surface training t=18556, loss=0.023107540793716908\n", - "Surface training t=18557, loss=0.026374979875981808\n", - "Surface training t=18558, loss=0.025630848482251167\n", - "Surface training t=18559, loss=0.02185603603720665\n", - "Surface training t=18560, loss=0.01973490323871374\n", - "Surface training t=18561, loss=0.02568892017006874\n", - "Surface training t=18562, loss=0.02217678166925907\n", - "Surface training t=18563, loss=0.02010231837630272\n", - "Surface training t=18564, loss=0.02130728494375944\n", - "Surface training t=18565, loss=0.025918020866811275\n", - "Surface training t=18566, loss=0.029267863370478153\n", - "Surface training t=18567, loss=0.017581273801624775\n", - "Surface training t=18568, loss=0.0237486744299531\n", - "Surface training t=18569, loss=0.025077766738831997\n", - "Surface training t=18570, loss=0.025924263522028923\n", - "Surface training t=18571, loss=0.027636516839265823\n", - "Surface training t=18572, loss=0.024400427006185055\n", - "Surface training t=18573, loss=0.022725502029061317\n", - "Surface training t=18574, loss=0.025552667677402496\n", - "Surface training t=18575, loss=0.033635860309004784\n", - "Surface training t=18576, loss=0.02309450227767229\n", - "Surface training t=18577, loss=0.02271289099007845\n", - "Surface training t=18578, loss=0.021747115068137646\n", - "Surface training t=18579, loss=0.022293899208307266\n", - "Surface training t=18580, loss=0.02411291655153036\n", - "Surface training t=18581, loss=0.026871069334447384\n", - "Surface training t=18582, loss=0.027426132932305336\n", - "Surface training t=18583, loss=0.02063809521496296\n", - "Surface training t=18584, loss=0.017419646959751844\n", - "Surface training t=18585, loss=0.021268350072205067\n", - "Surface training t=18586, loss=0.025565428659319878\n", - "Surface training t=18587, loss=0.0204881327226758\n", - "Surface training t=18588, loss=0.020791797898709774\n", - "Surface training t=18589, loss=0.02607191726565361\n", - "Surface training t=18590, loss=0.02347339689731598\n", - "Surface training t=18591, loss=0.015353268012404442\n", - "Surface training t=18592, loss=0.01775519922375679\n", - "Surface training t=18593, loss=0.01914398930966854\n", - "Surface training t=18594, loss=0.016553625464439392\n", - "Surface training t=18595, loss=0.024094752967357635\n", - "Surface training t=18596, loss=0.022169554606080055\n", - "Surface training t=18597, loss=0.01906159520149231\n", - "Surface training t=18598, loss=0.029201542027294636\n", - "Surface training t=18599, loss=0.026032278314232826\n", - 
"Surface training t=18600, loss=0.03488859534263611\n", - "Surface training t=18601, loss=0.029549487866461277\n", - "Surface training t=18602, loss=0.02616945467889309\n", - "Surface training t=18603, loss=0.03138568624854088\n", - "Surface training t=18604, loss=0.034086731262505054\n", - "Surface training t=18605, loss=0.026634950190782547\n", - "Surface training t=18606, loss=0.04181336611509323\n", - "Surface training t=18607, loss=0.026860413141548634\n", - "Surface training t=18608, loss=0.02896197885274887\n", - "Surface training t=18609, loss=0.03626945521682501\n", - "Surface training t=18610, loss=0.02430787868797779\n", - "Surface training t=18611, loss=0.03013077098876238\n", - "Surface training t=18612, loss=0.021692092530429363\n", - "Surface training t=18613, loss=0.029750248417258263\n", - "Surface training t=18614, loss=0.02929709479212761\n", - "Surface training t=18615, loss=0.02619398944079876\n", - "Surface training t=18616, loss=0.026649726554751396\n", - "Surface training t=18617, loss=0.028850805014371872\n", - "Surface training t=18618, loss=0.02828103955835104\n", - "Surface training t=18619, loss=0.028276885859668255\n", - "Surface training t=18620, loss=0.023547050543129444\n", - "Surface training t=18621, loss=0.029152074828743935\n", - "Surface training t=18622, loss=0.028472357429564\n", - "Surface training t=18623, loss=0.04454333521425724\n", - "Surface training t=18624, loss=0.03483757749199867\n", - "Surface training t=18625, loss=0.03837522678077221\n", - "Surface training t=18626, loss=0.04272212367504835\n", - "Surface training t=18627, loss=0.0787598043680191\n", - "Surface training t=18628, loss=0.0443638376891613\n", - "Surface training t=18629, loss=0.04185775574296713\n", - "Surface training t=18630, loss=0.06506333500146866\n", - "Surface training t=18631, loss=0.045039646327495575\n", - "Surface training t=18632, loss=0.06660114228725433\n", - "Surface training t=18633, loss=0.04925644397735596\n", - "Surface training t=18634, loss=0.0439608059823513\n", - "Surface training t=18635, loss=0.0647890493273735\n", - "Surface training t=18636, loss=0.03567009698599577\n", - "Surface training t=18637, loss=0.037252217531204224\n", - "Surface training t=18638, loss=0.04045472014695406\n", - "Surface training t=18639, loss=0.06625533476471901\n", - "Surface training t=18640, loss=0.04151199944317341\n", - "Surface training t=18641, loss=0.04578777216374874\n", - "Surface training t=18642, loss=0.04950896464288235\n", - "Surface training t=18643, loss=0.04193062521517277\n", - "Surface training t=18644, loss=0.06258884817361832\n", - "Surface training t=18645, loss=0.05134893022477627\n", - "Surface training t=18646, loss=0.036865007132291794\n", - "Surface training t=18647, loss=0.04819844290614128\n", - "Surface training t=18648, loss=0.03537999652326107\n", - "Surface training t=18649, loss=0.03675224632024765\n", - "Surface training t=18650, loss=0.03832534700632095\n", - "Surface training t=18651, loss=0.026155244559049606\n", - "Surface training t=18652, loss=0.04487971216440201\n", - "Surface training t=18653, loss=0.04187550209462643\n", - "Surface training t=18654, loss=0.047136737033724785\n", - "Surface training t=18655, loss=0.043899646028876305\n", - "Surface training t=18656, loss=0.047745002433657646\n", - "Surface training t=18657, loss=0.034292712807655334\n", - "Surface training t=18658, loss=0.02715173549950123\n", - "Surface training t=18659, loss=0.038953643292188644\n", - "Surface training t=18660, loss=0.02494427841156721\n", - 
"Surface training t=18661, loss=0.032467687502503395\n", - "Surface training t=18662, loss=0.026328331790864468\n", - "Surface training t=18663, loss=0.030854140408337116\n", - "Surface training t=18664, loss=0.022205153480172157\n", - "Surface training t=18665, loss=0.025530772283673286\n", - "Surface training t=18666, loss=0.028284178115427494\n", - "Surface training t=18667, loss=0.030320914462208748\n", - "Surface training t=18668, loss=0.027068505063652992\n", - "Surface training t=18669, loss=0.022084505297243595\n", - "Surface training t=18670, loss=0.022850333712995052\n", - "Surface training t=18671, loss=0.019302785396575928\n", - "Surface training t=18672, loss=0.02120122406631708\n", - "Surface training t=18673, loss=0.030235828831791878\n", - "Surface training t=18674, loss=0.020824491046369076\n", - "Surface training t=18675, loss=0.019288803450763226\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=18676, loss=0.02115009445697069\n", - "Surface training t=18677, loss=0.018843035213649273\n", - "Surface training t=18678, loss=0.02752424031496048\n", - "Surface training t=18679, loss=0.02271966263651848\n", - "Surface training t=18680, loss=0.020018701441586018\n", - "Surface training t=18681, loss=0.020950933918356895\n", - "Surface training t=18682, loss=0.02124528679996729\n", - "Surface training t=18683, loss=0.027882032096385956\n", - "Surface training t=18684, loss=0.020635759457945824\n", - "Surface training t=18685, loss=0.026532446034252644\n", - "Surface training t=18686, loss=0.01938468124717474\n", - "Surface training t=18687, loss=0.02834183257073164\n", - "Surface training t=18688, loss=0.018460676074028015\n", - "Surface training t=18689, loss=0.02785992342978716\n", - "Surface training t=18690, loss=0.01999308168888092\n", - "Surface training t=18691, loss=0.028115435503423214\n", - "Surface training t=18692, loss=0.018935689702630043\n", - "Surface training t=18693, loss=0.01685618096962571\n", - "Surface training t=18694, loss=0.018733719363808632\n", - "Surface training t=18695, loss=0.018669508397579193\n", - "Surface training t=18696, loss=0.02277335338294506\n", - "Surface training t=18697, loss=0.026319502852857113\n", - "Surface training t=18698, loss=0.032651206478476524\n", - "Surface training t=18699, loss=0.024798449128866196\n", - "Surface training t=18700, loss=0.04374583996832371\n", - "Surface training t=18701, loss=0.0349369328469038\n", - "Surface training t=18702, loss=0.026337455958127975\n", - "Surface training t=18703, loss=0.02362640853971243\n", - "Surface training t=18704, loss=0.015300127677619457\n", - "Surface training t=18705, loss=0.019371533766388893\n", - "Surface training t=18706, loss=0.01830014493316412\n", - "Surface training t=18707, loss=0.01947220414876938\n", - "Surface training t=18708, loss=0.023420230485498905\n", - "Surface training t=18709, loss=0.023797782137989998\n", - "Surface training t=18710, loss=0.024843464139848948\n", - "Surface training t=18711, loss=0.025271172635257244\n", - "Surface training t=18712, loss=0.029001510702073574\n", - "Surface training t=18713, loss=0.025752251967787743\n", - "Surface training t=18714, loss=0.02509206160902977\n", - "Surface training t=18715, loss=0.027396051213145256\n", - "Surface training t=18716, loss=0.03837398253381252\n", - "Surface training t=18717, loss=0.0277751162648201\n", - "Surface training t=18718, loss=0.028240512125194073\n", - "Surface training t=18719, loss=0.04099300503730774\n", - "Surface training 
t=18720, loss=0.030685195699334145\n", - "Surface training t=18721, loss=0.03986651822924614\n", - "Surface training t=18722, loss=0.03423864208161831\n", - "Surface training t=18723, loss=0.03356047533452511\n", - "Surface training t=18724, loss=0.03955840319395065\n", - "Surface training t=18725, loss=0.033946478739380836\n", - "Surface training t=18726, loss=0.03949507884681225\n", - "Surface training t=18727, loss=0.03498780820518732\n", - "Surface training t=18728, loss=0.029735164251178503\n", - "Surface training t=18729, loss=0.06765133887529373\n", - "Surface training t=18730, loss=0.04497847333550453\n", - "Surface training t=18731, loss=0.06142117455601692\n", - "Surface training t=18732, loss=0.036660932935774326\n", - "Surface training t=18733, loss=0.04735780507326126\n", - "Surface training t=18734, loss=0.030804005451500416\n", - "Surface training t=18735, loss=0.03842867538332939\n", - "Surface training t=18736, loss=0.021809504367411137\n", - "Surface training t=18737, loss=0.024078725837171078\n", - "Surface training t=18738, loss=0.030078189447522163\n", - "Surface training t=18739, loss=0.02887069433927536\n", - "Surface training t=18740, loss=0.03171989321708679\n", - "Surface training t=18741, loss=0.025746146216988564\n", - "Surface training t=18742, loss=0.02148358430713415\n", - "Surface training t=18743, loss=0.03011000994592905\n", - "Surface training t=18744, loss=0.04600195214152336\n", - "Surface training t=18745, loss=0.03792464919388294\n", - "Surface training t=18746, loss=0.03901875205338001\n", - "Surface training t=18747, loss=0.05808126740157604\n", - "Surface training t=18748, loss=0.049986908212304115\n", - "Surface training t=18749, loss=0.038150353357195854\n", - "Surface training t=18750, loss=0.06960175558924675\n", - "Surface training t=18751, loss=0.049874911084771156\n", - "Surface training t=18752, loss=0.044361162930727005\n", - "Surface training t=18753, loss=0.08221209794282913\n", - "Surface training t=18754, loss=0.04191631264984608\n", - "Surface training t=18755, loss=0.05198961682617664\n", - "Surface training t=18756, loss=0.031561958603560925\n", - "Surface training t=18757, loss=0.04087106790393591\n", - "Surface training t=18758, loss=0.03945896588265896\n", - "Surface training t=18759, loss=0.03148580342531204\n", - "Surface training t=18760, loss=0.03730293456465006\n", - "Surface training t=18761, loss=0.03186099324375391\n", - "Surface training t=18762, loss=0.025312711484730244\n", - "Surface training t=18763, loss=0.03346697520464659\n", - "Surface training t=18764, loss=0.03419573325663805\n", - "Surface training t=18765, loss=0.02814518753439188\n", - "Surface training t=18766, loss=0.037886811420321465\n", - "Surface training t=18767, loss=0.033369033597409725\n", - "Surface training t=18768, loss=0.02485367190092802\n", - "Surface training t=18769, loss=0.028745213523507118\n", - "Surface training t=18770, loss=0.03261330258101225\n", - "Surface training t=18771, loss=0.03074295725673437\n", - "Surface training t=18772, loss=0.05141402594745159\n", - "Surface training t=18773, loss=0.03234943933784962\n", - "Surface training t=18774, loss=0.038301607593894005\n", - "Surface training t=18775, loss=0.028593608178198338\n", - "Surface training t=18776, loss=0.02382540423423052\n", - "Surface training t=18777, loss=0.0313392523676157\n", - "Surface training t=18778, loss=0.03386366553604603\n", - "Surface training t=18779, loss=0.03397518489509821\n", - "Surface training t=18780, loss=0.01978888362646103\n", - "Surface 
- [notebook stdout stream elided: repetitive per-step training log "Surface training t=18781 … t=20046, loss=…", losses fluctuating roughly between 0.014 and 0.085]
- "Surface training t=20047, loss=0.0241236574947834\n", - "Surface training t=20048, loss=0.018162505701184273\n", - "Surface training t=20049, loss=0.018962860107421875\n", - "Surface training t=20050, loss=0.021938307210803032\n", - "Surface training t=20051, loss=0.01742655038833618\n", - "Surface training t=20052, loss=0.021159637719392776\n", - "Surface training t=20053, loss=0.020322157070040703\n", - "Surface training t=20054, loss=0.022766857407987118\n", - "Surface training t=20055, loss=0.023023778572678566\n", - "Surface training t=20056, loss=0.018173612654209137\n", - "Surface training t=20057, loss=0.017583385575562716\n", - "Surface training t=20058, loss=0.0264816889539361\n", - "Surface training t=20059, loss=0.02760987915098667\n", - "Surface training t=20060, loss=0.029146154411137104\n", - "Surface training t=20061, loss=0.032486699521541595\n", - "Surface training t=20062, loss=0.027164896950125694\n", - "Surface training t=20063, loss=0.029340889304876328\n", - "Surface training t=20064, loss=0.027240300551056862\n", - "Surface training t=20065, loss=0.03241738211363554\n", - "Surface training t=20066, loss=0.02500811405479908\n", - "Surface training t=20067, loss=0.0351168867200613\n", - "Surface training t=20068, loss=0.029782342724502087\n", - "Surface training t=20069, loss=0.03393415827304125\n", - "Surface training t=20070, loss=0.03621396981179714\n", - "Surface training t=20071, loss=0.03614359349012375\n", - "Surface training t=20072, loss=0.048618581146001816\n", - "Surface training t=20073, loss=0.029543728567659855\n", - "Surface training t=20074, loss=0.03035399317741394\n", - "Surface training t=20075, loss=0.026242658495903015\n", - "Surface training t=20076, loss=0.0290963901206851\n", - "Surface training t=20077, loss=0.024174486752599478\n", - "Surface training t=20078, loss=0.02201382163912058\n", - "Surface training t=20079, loss=0.028193993493914604\n", - "Surface training t=20080, loss=0.024370728991925716\n", - "Surface training t=20081, loss=0.02108531165868044\n", - "Surface training t=20082, loss=0.029376008547842503\n", - "Surface training t=20083, loss=0.03048190474510193\n", - "Surface training t=20084, loss=0.01762908650562167\n", - "Surface training t=20085, loss=0.018092534504830837\n", - "Surface training t=20086, loss=0.017801177222281694\n", - "Surface training t=20087, loss=0.02138759195804596\n", - "Surface training t=20088, loss=0.025707795284688473\n", - "Surface training t=20089, loss=0.0283293928951025\n", - "Surface training t=20090, loss=0.03306322917342186\n", - "Surface training t=20091, loss=0.035669757053256035\n", - "Surface training t=20092, loss=0.04616696946322918\n", - "Surface training t=20093, loss=0.0398526880890131\n", - "Surface training t=20094, loss=0.03319173771888018\n", - "Surface training t=20095, loss=0.03082333132624626\n", - "Surface training t=20096, loss=0.02525688149034977\n", - "Surface training t=20097, loss=0.03529965691268444\n", - "Surface training t=20098, loss=0.027940521016716957\n", - "Surface training t=20099, loss=0.03850376605987549\n", - "Surface training t=20100, loss=0.026942459866404533\n", - "Surface training t=20101, loss=0.030740205198526382\n", - "Surface training t=20102, loss=0.05291145667433739\n", - "Surface training t=20103, loss=0.038571758195757866\n", - "Surface training t=20104, loss=0.046182382851839066\n", - "Surface training t=20105, loss=0.03795763198286295\n", - "Surface training t=20106, loss=0.04277733154594898\n", - "Surface training t=20107, 
loss=0.031033179722726345\n", - "Surface training t=20108, loss=0.03369335550814867\n", - "Surface training t=20109, loss=0.05221303179860115\n", - "Surface training t=20110, loss=0.03620783053338528\n", - "Surface training t=20111, loss=0.04774472303688526\n", - "Surface training t=20112, loss=0.030386648140847683\n", - "Surface training t=20113, loss=0.060796692967414856\n", - "Surface training t=20114, loss=0.035533067770302296\n", - "Surface training t=20115, loss=0.03695022128522396\n", - "Surface training t=20116, loss=0.03319763392210007\n", - "Surface training t=20117, loss=0.02440656628459692\n", - "Surface training t=20118, loss=0.029776986688375473\n", - "Surface training t=20119, loss=0.033763784915208817\n", - "Surface training t=20120, loss=0.028579755686223507\n", - "Surface training t=20121, loss=0.023058651480823755\n", - "Surface training t=20122, loss=0.03401780501008034\n", - "Surface training t=20123, loss=0.022261606063693762\n", - "Surface training t=20124, loss=0.02479590754956007\n", - "Surface training t=20125, loss=0.030933852307498455\n", - "Surface training t=20126, loss=0.02616073004901409\n", - "Surface training t=20127, loss=0.017054004594683647\n", - "Surface training t=20128, loss=0.021756443195044994\n", - "Surface training t=20129, loss=0.021558314561843872\n", - "Surface training t=20130, loss=0.024140384048223495\n", - "Surface training t=20131, loss=0.026796058751642704\n", - "Surface training t=20132, loss=0.019224495626986027\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=20133, loss=0.019076397642493248\n", - "Surface training t=20134, loss=0.028185730800032616\n", - "Surface training t=20135, loss=0.028092237189412117\n", - "Surface training t=20136, loss=0.03023108933120966\n", - "Surface training t=20137, loss=0.026698870584368706\n", - "Surface training t=20138, loss=0.027963707223534584\n", - "Surface training t=20139, loss=0.023592823185026646\n", - "Surface training t=20140, loss=0.02354530245065689\n", - "Surface training t=20141, loss=0.020706295035779476\n", - "Surface training t=20142, loss=0.019566360861063004\n", - "Surface training t=20143, loss=0.021430905908346176\n", - "Surface training t=20144, loss=0.015877285040915012\n", - "Surface training t=20145, loss=0.021939842961728573\n", - "Surface training t=20146, loss=0.024923008866608143\n", - "Surface training t=20147, loss=0.01992921344935894\n", - "Surface training t=20148, loss=0.022227061912417412\n", - "Surface training t=20149, loss=0.024896152317523956\n", - "Surface training t=20150, loss=0.02561044692993164\n", - "Surface training t=20151, loss=0.029857716523110867\n", - "Surface training t=20152, loss=0.025795653462409973\n", - "Surface training t=20153, loss=0.027361206710338593\n", - "Surface training t=20154, loss=0.02842830866575241\n", - "Surface training t=20155, loss=0.030270801857113838\n", - "Surface training t=20156, loss=0.03158912807703018\n", - "Surface training t=20157, loss=0.027572307735681534\n", - "Surface training t=20158, loss=0.028927405830472708\n", - "Surface training t=20159, loss=0.041815342381596565\n", - "Surface training t=20160, loss=0.041406163945794106\n", - "Surface training t=20161, loss=0.03471854142844677\n", - "Surface training t=20162, loss=0.03178325481712818\n", - "Surface training t=20163, loss=0.05114220455288887\n", - "Surface training t=20164, loss=0.03265538811683655\n", - "Surface training t=20165, loss=0.03408942185342312\n", - "Surface training t=20166, 
loss=0.035087740048766136\n", - "Surface training t=20167, loss=0.03126250859349966\n", - "Surface training t=20168, loss=0.0355348140001297\n", - "Surface training t=20169, loss=0.02871963381767273\n", - "Surface training t=20170, loss=0.03610003925859928\n", - "Surface training t=20171, loss=0.04097955487668514\n", - "Surface training t=20172, loss=0.04389617033302784\n", - "Surface training t=20173, loss=0.03963095508515835\n", - "Surface training t=20174, loss=0.048901449888944626\n", - "Surface training t=20175, loss=0.03802751563489437\n", - "Surface training t=20176, loss=0.03310505021363497\n", - "Surface training t=20177, loss=0.03801857028156519\n", - "Surface training t=20178, loss=0.04277381673455238\n", - "Surface training t=20179, loss=0.04667660780251026\n", - "Surface training t=20180, loss=0.03566327877342701\n", - "Surface training t=20181, loss=0.03869590722024441\n", - "Surface training t=20182, loss=0.03711986541748047\n", - "Surface training t=20183, loss=0.025406821630895138\n", - "Surface training t=20184, loss=0.03418097831308842\n", - "Surface training t=20185, loss=0.025703643448650837\n", - "Surface training t=20186, loss=0.028605900704860687\n", - "Surface training t=20187, loss=0.02791641652584076\n", - "Surface training t=20188, loss=0.023338355123996735\n", - "Surface training t=20189, loss=0.030602119863033295\n", - "Surface training t=20190, loss=0.024317385628819466\n", - "Surface training t=20191, loss=0.028992559760808945\n", - "Surface training t=20192, loss=0.024346109479665756\n", - "Surface training t=20193, loss=0.028336748480796814\n", - "Surface training t=20194, loss=0.02938426099717617\n", - "Surface training t=20195, loss=0.029658127576112747\n", - "Surface training t=20196, loss=0.03561241552233696\n", - "Surface training t=20197, loss=0.02479364164173603\n", - "Surface training t=20198, loss=0.027144762687385082\n", - "Surface training t=20199, loss=0.025891108438372612\n", - "Surface training t=20200, loss=0.027374339289963245\n", - "Surface training t=20201, loss=0.02995685674250126\n", - "Surface training t=20202, loss=0.02636506874114275\n", - "Surface training t=20203, loss=0.025019729509949684\n", - "Surface training t=20204, loss=0.022459021769464016\n", - "Surface training t=20205, loss=0.030111970379948616\n", - "Surface training t=20206, loss=0.03154766280204058\n", - "Surface training t=20207, loss=0.031090570613741875\n", - "Surface training t=20208, loss=0.021454112604260445\n", - "Surface training t=20209, loss=0.027566482312977314\n", - "Surface training t=20210, loss=0.028855076991021633\n", - "Surface training t=20211, loss=0.03727484680712223\n", - "Surface training t=20212, loss=0.028705136850476265\n", - "Surface training t=20213, loss=0.03593736235052347\n", - "Surface training t=20214, loss=0.038885992020368576\n", - "Surface training t=20215, loss=0.031499093398451805\n", - "Surface training t=20216, loss=0.03200560435652733\n", - "Surface training t=20217, loss=0.03596420772373676\n", - "Surface training t=20218, loss=0.0288350535556674\n", - "Surface training t=20219, loss=0.03571888245642185\n", - "Surface training t=20220, loss=0.023303011432290077\n", - "Surface training t=20221, loss=0.027457017451524734\n", - "Surface training t=20222, loss=0.028283029794692993\n", - "Surface training t=20223, loss=0.026945320889353752\n", - "Surface training t=20224, loss=0.023071018047630787\n", - "Surface training t=20225, loss=0.023533891886472702\n", - "Surface training t=20226, loss=0.026789122261106968\n", - "Surface 
training t=20227, loss=0.024200462736189365\n", - "Surface training t=20228, loss=0.025054403580725193\n", - "Surface training t=20229, loss=0.015549805015325546\n", - "Surface training t=20230, loss=0.01801608642563224\n", - "Surface training t=20231, loss=0.021362573839724064\n", - "Surface training t=20232, loss=0.024569197557866573\n", - "Surface training t=20233, loss=0.024079669266939163\n", - "Surface training t=20234, loss=0.026334283873438835\n", - "Surface training t=20235, loss=0.030703308060765266\n", - "Surface training t=20236, loss=0.027569085359573364\n", - "Surface training t=20237, loss=0.030299248173832893\n", - "Surface training t=20238, loss=0.034203458577394485\n", - "Surface training t=20239, loss=0.02344674989581108\n", - "Surface training t=20240, loss=0.028957966715097427\n", - "Surface training t=20241, loss=0.03670883923768997\n", - "Surface training t=20242, loss=0.028676213696599007\n", - "Surface training t=20243, loss=0.06232621520757675\n", - "Surface training t=20244, loss=0.03665483556687832\n", - "Surface training t=20245, loss=0.04058147221803665\n", - "Surface training t=20246, loss=0.03023324627429247\n", - "Surface training t=20247, loss=0.03001983929425478\n", - "Surface training t=20248, loss=0.04379806108772755\n", - "Surface training t=20249, loss=0.047743276692926884\n", - "Surface training t=20250, loss=0.03574923612177372\n", - "Surface training t=20251, loss=0.039736793376505375\n", - "Surface training t=20252, loss=0.045208705589175224\n", - "Surface training t=20253, loss=0.03829490207135677\n", - "Surface training t=20254, loss=0.028050675988197327\n", - "Surface training t=20255, loss=0.02746966853737831\n", - "Surface training t=20256, loss=0.032542912289500237\n", - "Surface training t=20257, loss=0.023331341333687305\n", - "Surface training t=20258, loss=0.022537211887538433\n", - "Surface training t=20259, loss=0.026603251695632935\n", - "Surface training t=20260, loss=0.026912829838693142\n", - "Surface training t=20261, loss=0.024288932792842388\n", - "Surface training t=20262, loss=0.028817668557167053\n", - "Surface training t=20263, loss=0.023065170273184776\n", - "Surface training t=20264, loss=0.033618295565247536\n", - "Surface training t=20265, loss=0.028023780323565006\n", - "Surface training t=20266, loss=0.04167390614748001\n", - "Surface training t=20267, loss=0.029005737975239754\n", - "Surface training t=20268, loss=0.04372112452983856\n", - "Surface training t=20269, loss=0.02888430282473564\n", - "Surface training t=20270, loss=0.02311350591480732\n", - "Surface training t=20271, loss=0.036236075684428215\n", - "Surface training t=20272, loss=0.02613083738833666\n", - "Surface training t=20273, loss=0.028754408471286297\n", - "Surface training t=20274, loss=0.04605506733059883\n", - "Surface training t=20275, loss=0.03701785393059254\n", - "Surface training t=20276, loss=0.03252433426678181\n", - "Surface training t=20277, loss=0.047160739079117775\n", - "Surface training t=20278, loss=0.051862286403775215\n", - "Surface training t=20279, loss=0.03919637016952038\n", - "Surface training t=20280, loss=0.042567165568470955\n", - "Surface training t=20281, loss=0.04461614415049553\n", - "Surface training t=20282, loss=0.034814637154340744\n", - "Surface training t=20283, loss=0.0347845871001482\n", - "Surface training t=20284, loss=0.023212074767798185\n", - "Surface training t=20285, loss=0.025999268516898155\n", - "Surface training t=20286, loss=0.03642270155251026\n", - "Surface training t=20287, 
loss=0.03201225399971008\n", - "Surface training t=20288, loss=0.03770194388926029\n", - "Surface training t=20289, loss=0.025049599818885326\n", - "Surface training t=20290, loss=0.024156593717634678\n", - "Surface training t=20291, loss=0.03178012743592262\n", - "Surface training t=20292, loss=0.024758552201092243\n", - "Surface training t=20293, loss=0.031365374103188515\n", - "Surface training t=20294, loss=0.025985023006796837\n", - "Surface training t=20295, loss=0.044398630037903786\n", - "Surface training t=20296, loss=0.0372330229729414\n", - "Surface training t=20297, loss=0.038249874487519264\n", - "Surface training t=20298, loss=0.03621509298682213\n", - "Surface training t=20299, loss=0.050207046791911125\n", - "Surface training t=20300, loss=0.044827280566096306\n", - "Surface training t=20301, loss=0.0359586626291275\n", - "Surface training t=20302, loss=0.05334496684372425\n", - "Surface training t=20303, loss=0.03867069445550442\n", - "Surface training t=20304, loss=0.03723360598087311\n", - "Surface training t=20305, loss=0.03612906113266945\n", - "Surface training t=20306, loss=0.058687103912234306\n", - "Surface training t=20307, loss=0.03877768386155367\n", - "Surface training t=20308, loss=0.04630329832434654\n", - "Surface training t=20309, loss=0.06501692906022072\n", - "Surface training t=20310, loss=0.04587544500827789\n", - "Surface training t=20311, loss=0.0567751731723547\n", - "Surface training t=20312, loss=0.054311931133270264\n", - "Surface training t=20313, loss=0.03993340767920017\n", - "Surface training t=20314, loss=0.040089141577482224\n", - "Surface training t=20315, loss=0.030789158307015896\n", - "Surface training t=20316, loss=0.030351976864039898\n", - "Surface training t=20317, loss=0.03396839369088411\n", - "Surface training t=20318, loss=0.031050628051161766\n", - "Surface training t=20319, loss=0.03251381777226925\n", - "Surface training t=20320, loss=0.028538133949041367\n", - "Surface training t=20321, loss=0.02899135183542967\n", - "Surface training t=20322, loss=0.023789169266819954\n", - "Surface training t=20323, loss=0.02851026877760887\n", - "Surface training t=20324, loss=0.024349289014935493\n", - "Surface training t=20325, loss=0.01963112410157919\n", - "Surface training t=20326, loss=0.02052252274006605\n", - "Surface training t=20327, loss=0.023750399239361286\n", - "Surface training t=20328, loss=0.0314504224807024\n", - "Surface training t=20329, loss=0.033943381160497665\n", - "Surface training t=20330, loss=0.025152952410280704\n", - "Surface training t=20331, loss=0.025089601054787636\n", - "Surface training t=20332, loss=0.02772643882781267\n", - "Surface training t=20333, loss=0.026590035296976566\n", - "Surface training t=20334, loss=0.028343725949525833\n", - "Surface training t=20335, loss=0.03752748481929302\n", - "Surface training t=20336, loss=0.04562371410429478\n", - "Surface training t=20337, loss=0.03169416822493076\n", - "Surface training t=20338, loss=0.036279210820794106\n", - "Surface training t=20339, loss=0.025391685776412487\n", - "Surface training t=20340, loss=0.03981561399996281\n", - "Surface training t=20341, loss=0.029887117445468903\n", - "Surface training t=20342, loss=0.0223060492426157\n", - "Surface training t=20343, loss=0.035136107355356216\n", - "Surface training t=20344, loss=0.02372110355645418\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=20345, loss=0.019982176832854748\n", - "Surface training t=20346, loss=0.022860780358314514\n", - 
"Surface training t=20347, loss=0.019883224740624428\n", - "Surface training t=20348, loss=0.021752397529780865\n", - "Surface training t=20349, loss=0.030461642891168594\n", - "Surface training t=20350, loss=0.02212789934128523\n", - "Surface training t=20351, loss=0.022720126435160637\n", - "Surface training t=20352, loss=0.019647885113954544\n", - "Surface training t=20353, loss=0.02753149438649416\n", - "Surface training t=20354, loss=0.024510550312697887\n", - "Surface training t=20355, loss=0.026400701142847538\n", - "Surface training t=20356, loss=0.032316249795258045\n", - "Surface training t=20357, loss=0.0261352751404047\n", - "Surface training t=20358, loss=0.03338050842285156\n", - "Surface training t=20359, loss=0.041431570425629616\n", - "Surface training t=20360, loss=0.029892430640757084\n", - "Surface training t=20361, loss=0.042469197884202003\n", - "Surface training t=20362, loss=0.03526262938976288\n", - "Surface training t=20363, loss=0.03160012327134609\n", - "Surface training t=20364, loss=0.029747013933956623\n", - "Surface training t=20365, loss=0.027804700657725334\n", - "Surface training t=20366, loss=0.031770781613886356\n", - "Surface training t=20367, loss=0.03591315168887377\n", - "Surface training t=20368, loss=0.03185892663896084\n", - "Surface training t=20369, loss=0.03408539667725563\n", - "Surface training t=20370, loss=0.02183835580945015\n", - "Surface training t=20371, loss=0.039127686992287636\n", - "Surface training t=20372, loss=0.03618542104959488\n", - "Surface training t=20373, loss=0.027116520330309868\n", - "Surface training t=20374, loss=0.027270854450762272\n", - "Surface training t=20375, loss=0.027939146384596825\n", - "Surface training t=20376, loss=0.029128076508641243\n", - "Surface training t=20377, loss=0.030407174490392208\n", - "Surface training t=20378, loss=0.0376689825206995\n", - "Surface training t=20379, loss=0.0422707125544548\n", - "Surface training t=20380, loss=0.03918983414769173\n", - "Surface training t=20381, loss=0.052119866013526917\n", - "Surface training t=20382, loss=0.07507980242371559\n", - "Surface training t=20383, loss=0.04762963578104973\n", - "Surface training t=20384, loss=0.05910881422460079\n", - "Surface training t=20385, loss=0.04402434825897217\n", - "Surface training t=20386, loss=0.03221306763589382\n", - "Surface training t=20387, loss=0.03144186083227396\n", - "Surface training t=20388, loss=0.02593237068504095\n", - "Surface training t=20389, loss=0.04111766070127487\n", - "Surface training t=20390, loss=0.02963760308921337\n", - "Surface training t=20391, loss=0.02989712357521057\n", - "Surface training t=20392, loss=0.03303599450737238\n", - "Surface training t=20393, loss=0.04035675898194313\n", - "Surface training t=20394, loss=0.03945271112024784\n", - "Surface training t=20395, loss=0.029528611339628696\n", - "Surface training t=20396, loss=0.026322852820158005\n", - "Surface training t=20397, loss=0.028531155548989773\n", - "Surface training t=20398, loss=0.033472124487161636\n", - "Surface training t=20399, loss=0.040668344125151634\n", - "Surface training t=20400, loss=0.04220324382185936\n", - "Surface training t=20401, loss=0.028658488765358925\n", - "Surface training t=20402, loss=0.02769116684794426\n", - "Surface training t=20403, loss=0.033596547320485115\n", - "Surface training t=20404, loss=0.035306576639413834\n", - "Surface training t=20405, loss=0.02799694985151291\n", - "Surface training t=20406, loss=0.02027328871190548\n", - "Surface training t=20407, 
loss=0.019916316494345665\n", - "Surface training t=20408, loss=0.01826518028974533\n", - "Surface training t=20409, loss=0.02268066257238388\n", - "Surface training t=20410, loss=0.023745164275169373\n", - "Surface training t=20411, loss=0.027629390358924866\n", - "Surface training t=20412, loss=0.019702534191310406\n", - "Surface training t=20413, loss=0.018233737908303738\n", - "Surface training t=20414, loss=0.02172186877578497\n", - "Surface training t=20415, loss=0.022285958752036095\n", - "Surface training t=20416, loss=0.019754801876842976\n", - "Surface training t=20417, loss=0.023827049881219864\n", - "Surface training t=20418, loss=0.02043826226145029\n", - "Surface training t=20419, loss=0.01813726406544447\n", - "Surface training t=20420, loss=0.014811324421316385\n", - "Surface training t=20421, loss=0.016846087761223316\n", - "Surface training t=20422, loss=0.016543484292924404\n", - "Surface training t=20423, loss=0.020014988258481026\n", - "Surface training t=20424, loss=0.01893212180584669\n", - "Surface training t=20425, loss=0.016980770509690046\n", - "Surface training t=20426, loss=0.021835933439433575\n", - "Surface training t=20427, loss=0.021864820271730423\n", - "Surface training t=20428, loss=0.026936430484056473\n", - "Surface training t=20429, loss=0.0247690686956048\n", - "Surface training t=20430, loss=0.024178802967071533\n", - "Surface training t=20431, loss=0.03163476847112179\n", - "Surface training t=20432, loss=0.027163577266037464\n", - "Surface training t=20433, loss=0.022608362138271332\n", - "Surface training t=20434, loss=0.03401972260326147\n", - "Surface training t=20435, loss=0.03055666945874691\n", - "Surface training t=20436, loss=0.023154899012297392\n", - "Surface training t=20437, loss=0.04253249242901802\n", - "Surface training t=20438, loss=0.030143569223582745\n", - "Surface training t=20439, loss=0.029613839462399483\n", - "Surface training t=20440, loss=0.01970855798572302\n", - "Surface training t=20441, loss=0.02556877676397562\n", - "Surface training t=20442, loss=0.026561470702290535\n", - "Surface training t=20443, loss=0.02285151556134224\n", - "Surface training t=20444, loss=0.018394489772617817\n", - "Surface training t=20445, loss=0.014586267061531544\n", - "Surface training t=20446, loss=0.021177465096116066\n", - "Surface training t=20447, loss=0.022677425295114517\n", - "Surface training t=20448, loss=0.02797921746969223\n", - "Surface training t=20449, loss=0.02791001694276929\n", - "Surface training t=20450, loss=0.03479485213756561\n", - "Surface training t=20451, loss=0.02303607389330864\n", - "Surface training t=20452, loss=0.023511130828410387\n", - "Surface training t=20453, loss=0.030754616484045982\n", - "Surface training t=20454, loss=0.024778754450380802\n", - "Surface training t=20455, loss=0.02153928391635418\n", - "Surface training t=20456, loss=0.021078813821077347\n", - "Surface training t=20457, loss=0.02243607398122549\n", - "Surface training t=20458, loss=0.028489038348197937\n", - "Surface training t=20459, loss=0.019375475123524666\n", - "Surface training t=20460, loss=0.01780979335308075\n", - "Surface training t=20461, loss=0.020327435806393623\n", - "Surface training t=20462, loss=0.021729699335992336\n", - "Surface training t=20463, loss=0.025314398109912872\n", - "Surface training t=20464, loss=0.02529071643948555\n", - "Surface training t=20465, loss=0.02681918628513813\n", - "Surface training t=20466, loss=0.02767102513462305\n", - "Surface training t=20467, loss=0.02623437624424696\n", - "Surface 
training t=20468, loss=0.03351369686424732\n", - "Surface training t=20469, loss=0.02612004056572914\n", - "Surface training t=20470, loss=0.029473261907696724\n", - "Surface training t=20471, loss=0.02505799848586321\n", - "Surface training t=20472, loss=0.022236096672713757\n", - "Surface training t=20473, loss=0.017577726393938065\n", - "Surface training t=20474, loss=0.020658467896282673\n", - "Surface training t=20475, loss=0.01931087952107191\n", - "Surface training t=20476, loss=0.025279001332819462\n", - "Surface training t=20477, loss=0.027718684636056423\n", - "Surface training t=20478, loss=0.025537905283272266\n", - "Surface training t=20479, loss=0.023980174213647842\n", - "Surface training t=20480, loss=0.02701470721513033\n", - "Surface training t=20481, loss=0.020359485410153866\n", - "Surface training t=20482, loss=0.0205033291131258\n", - "Surface training t=20483, loss=0.015195590443909168\n", - "Surface training t=20484, loss=0.019215965643525124\n", - "Surface training t=20485, loss=0.029480070807039738\n", - "Surface training t=20486, loss=0.03291884995996952\n", - "Surface training t=20487, loss=0.034194111824035645\n", - "Surface training t=20488, loss=0.03352321032434702\n", - "Surface training t=20489, loss=0.026737903244793415\n", - "Surface training t=20490, loss=0.03924754448235035\n", - "Surface training t=20491, loss=0.03892207145690918\n", - "Surface training t=20492, loss=0.0317848976701498\n", - "Surface training t=20493, loss=0.04723488166928291\n", - "Surface training t=20494, loss=0.03301762975752354\n", - "Surface training t=20495, loss=0.03928246535360813\n", - "Surface training t=20496, loss=0.027385849505662918\n", - "Surface training t=20497, loss=0.024210148490965366\n", - "Surface training t=20498, loss=0.03125778120011091\n", - "Surface training t=20499, loss=0.027563240379095078\n", - "Surface training t=20500, loss=0.032425662502646446\n", - "Surface training t=20501, loss=0.021872430108487606\n", - "Surface training t=20502, loss=0.027242702431976795\n", - "Surface training t=20503, loss=0.021091210655868053\n", - "Surface training t=20504, loss=0.016544760670512915\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=20505, loss=0.025228796526789665\n", - "Surface training t=20506, loss=0.020813601091504097\n", - "Surface training t=20507, loss=0.015674125868827105\n", - "Surface training t=20508, loss=0.018332085572183132\n", - "Surface training t=20509, loss=0.016468352638185024\n", - "Surface training t=20510, loss=0.017580727115273476\n", - "Surface training t=20511, loss=0.0210881307721138\n", - "Surface training t=20512, loss=0.02037572581321001\n", - "Surface training t=20513, loss=0.019968404434621334\n", - "Surface training t=20514, loss=0.02182141598314047\n", - "Surface training t=20515, loss=0.02170114405453205\n", - "Surface training t=20516, loss=0.018330604303628206\n", - "Surface training t=20517, loss=0.018486608751118183\n", - "Surface training t=20518, loss=0.017801275476813316\n", - "Surface training t=20519, loss=0.016334581188857555\n", - "Surface training t=20520, loss=0.01866749208420515\n", - "Surface training t=20521, loss=0.015258261002600193\n", - "Surface training t=20522, loss=0.0189519040286541\n", - "Surface training t=20523, loss=0.01856479560956359\n", - "Surface training t=20524, loss=0.0168069489300251\n", - "Surface training t=20525, loss=0.018259460106492043\n", - "Surface training t=20526, loss=0.016329155303537846\n", - "Surface training t=20527, 
loss=0.016406390815973282\n", - "Surface training t=20528, loss=0.01672972459346056\n", - "Surface training t=20529, loss=0.018923972733318806\n", - "Surface training t=20530, loss=0.02040155977010727\n", - "Surface training t=20531, loss=0.018143839202821255\n", - "Surface training t=20532, loss=0.014189634472131729\n", - "Surface training t=20533, loss=0.01911008544266224\n", - "Surface training t=20534, loss=0.014153762720525265\n", - "Surface training t=20535, loss=0.02372333500534296\n", - "Surface training t=20536, loss=0.02660489547997713\n", - "Surface training t=20537, loss=0.027946291491389275\n", - "Surface training t=20538, loss=0.03306981362402439\n", - "Surface training t=20539, loss=0.041349856182932854\n", - "Surface training t=20540, loss=0.036865755915641785\n", - "Surface training t=20541, loss=0.023511948995292187\n", - "Surface training t=20542, loss=0.02791784144937992\n", - "Surface training t=20543, loss=0.026936324313282967\n", - "Surface training t=20544, loss=0.031005287542939186\n", - "Surface training t=20545, loss=0.025223448872566223\n", - "Surface training t=20546, loss=0.03904411941766739\n", - "Surface training t=20547, loss=0.03262963332235813\n", - "Surface training t=20548, loss=0.035860439762473106\n", - "Surface training t=20549, loss=0.027037962339818478\n", - "Surface training t=20550, loss=0.02832077629864216\n", - "Surface training t=20551, loss=0.025742092169821262\n", - "Surface training t=20552, loss=0.028340015560388565\n", - "Surface training t=20553, loss=0.03351443633437157\n", - "Surface training t=20554, loss=0.029482770711183548\n", - "Surface training t=20555, loss=0.02416868321597576\n", - "Surface training t=20556, loss=0.02639959566295147\n", - "Surface training t=20557, loss=0.025204656645655632\n", - "Surface training t=20558, loss=0.021821776404976845\n", - "Surface training t=20559, loss=0.022273494862020016\n", - "Surface training t=20560, loss=0.027029333636164665\n", - "Surface training t=20561, loss=0.02694527618587017\n", - "Surface training t=20562, loss=0.025972407311201096\n", - "Surface training t=20563, loss=0.03569401055574417\n", - "Surface training t=20564, loss=0.0290315393358469\n", - "Surface training t=20565, loss=0.027715016156435013\n", - "Surface training t=20566, loss=0.017616388853639364\n", - "Surface training t=20567, loss=0.025501050055027008\n", - "Surface training t=20568, loss=0.02465300541371107\n", - "Surface training t=20569, loss=0.019410892389714718\n", - "Surface training t=20570, loss=0.03582334518432617\n", - "Surface training t=20571, loss=0.035212866961956024\n", - "Surface training t=20572, loss=0.03053381573408842\n", - "Surface training t=20573, loss=0.032165151089429855\n", - "Surface training t=20574, loss=0.028979208320379257\n", - "Surface training t=20575, loss=0.03270299732685089\n", - "Surface training t=20576, loss=0.03167400509119034\n", - "Surface training t=20577, loss=0.030437068082392216\n", - "Surface training t=20578, loss=0.03591681458055973\n", - "Surface training t=20579, loss=0.033153923228383064\n", - "Surface training t=20580, loss=0.04772073030471802\n", - "Surface training t=20581, loss=0.027682531625032425\n", - "Surface training t=20582, loss=0.027220288291573524\n", - "Surface training t=20583, loss=0.03284614905714989\n", - "Surface training t=20584, loss=0.038634803146123886\n", - "Surface training t=20585, loss=0.054583990946412086\n", - "Surface training t=20586, loss=0.03783484362065792\n", - "Surface training t=20587, loss=0.036209711804986\n", - "Surface 
training t=20588, loss=0.02664254419505596\n", - "Surface training t=20589, loss=0.027789552696049213\n", - "Surface training t=20590, loss=0.02301068976521492\n", - "Surface training t=20591, loss=0.030357598327100277\n", - "Surface training t=20592, loss=0.02722999732941389\n", - "Surface training t=20593, loss=0.04586941562592983\n", - "Surface training t=20594, loss=0.03335350379347801\n", - "Surface training t=20595, loss=0.031508262269198895\n", - "Surface training t=20596, loss=0.03847783990204334\n", - "Surface training t=20597, loss=0.025634759105741978\n", - "Surface training t=20598, loss=0.03978290036320686\n", - "Surface training t=20599, loss=0.0275389077141881\n", - "Surface training t=20600, loss=0.023160229437053204\n", - "Surface training t=20601, loss=0.02365154679864645\n", - "Surface training t=20602, loss=0.027614504098892212\n", - "Surface training t=20603, loss=0.018664591945707798\n", - "Surface training t=20604, loss=0.01978880725800991\n", - "Surface training t=20605, loss=0.023612423799932003\n", - "Surface training t=20606, loss=0.024767412804067135\n", - "Surface training t=20607, loss=0.030438479967415333\n", - "Surface training t=20608, loss=0.02575257420539856\n", - "Surface training t=20609, loss=0.024966352619230747\n", - "Surface training t=20610, loss=0.03944513760507107\n", - "Surface training t=20611, loss=0.03278292901813984\n", - "Surface training t=20612, loss=0.029850791208446026\n", - "Surface training t=20613, loss=0.024840237572789192\n", - "Surface training t=20614, loss=0.0279221273958683\n", - "Surface training t=20615, loss=0.024667139165103436\n", - "Surface training t=20616, loss=0.02531229518353939\n", - "Surface training t=20617, loss=0.024636724032461643\n", - "Surface training t=20618, loss=0.02179604023694992\n", - "Surface training t=20619, loss=0.02521125040948391\n", - "Surface training t=20620, loss=0.025196907110512257\n", - "Surface training t=20621, loss=0.02077170042321086\n", - "Surface training t=20622, loss=0.02172455843538046\n", - "Surface training t=20623, loss=0.02661601174622774\n", - "Surface training t=20624, loss=0.028514862060546875\n", - "Surface training t=20625, loss=0.02008743677288294\n", - "Surface training t=20626, loss=0.024658052250742912\n", - "Surface training t=20627, loss=0.023684967309236526\n", - "Surface training t=20628, loss=0.02712428942322731\n", - "Surface training t=20629, loss=0.024202164262533188\n", - "Surface training t=20630, loss=0.02037161961197853\n", - "Surface training t=20631, loss=0.031326331198215485\n", - "Surface training t=20632, loss=0.01911903079599142\n", - "Surface training t=20633, loss=0.027418147772550583\n", - "Surface training t=20634, loss=0.0404457151889801\n", - "Surface training t=20635, loss=0.03125848062336445\n", - "Surface training t=20636, loss=0.03293458092957735\n", - "Surface training t=20637, loss=0.02859581634402275\n", - "Surface training t=20638, loss=0.027836700901389122\n", - "Surface training t=20639, loss=0.026137537322938442\n", - "Surface training t=20640, loss=0.027375416830182076\n", - "Surface training t=20641, loss=0.04269211180508137\n", - "Surface training t=20642, loss=0.046657394617795944\n", - "Surface training t=20643, loss=0.03646354749798775\n", - "Surface training t=20644, loss=0.029064268339425325\n", - "Surface training t=20645, loss=0.04639446176588535\n", - "Surface training t=20646, loss=0.03809581324458122\n", - "Surface training t=20647, loss=0.03972426988184452\n", - "Surface training t=20648, loss=0.048824336379766464\n", - 
"Surface training t=20649, loss=0.0523833055049181\n", - "Surface training t=20650, loss=0.0477202944457531\n", - "Surface training t=20651, loss=0.04796839505434036\n", - "Surface training t=20652, loss=0.035582590848207474\n", - "Surface training t=20653, loss=0.040594542399048805\n", - "Surface training t=20654, loss=0.04638809338212013\n", - "Surface training t=20655, loss=0.027979616075754166\n", - "Surface training t=20656, loss=0.03533932939171791\n", - "Surface training t=20657, loss=0.025533460080623627\n", - "Surface training t=20658, loss=0.02706314716488123\n", - "Surface training t=20659, loss=0.026731627993285656\n", - "Surface training t=20660, loss=0.01916277315467596\n", - "Surface training t=20661, loss=0.01927812397480011\n", - "Surface training t=20662, loss=0.026904163882136345\n", - "Surface training t=20663, loss=0.02379481215029955\n", - "Surface training t=20664, loss=0.021613663993775845\n", - "Surface training t=20665, loss=0.021422239020466805\n", - "Surface training t=20666, loss=0.022036511451005936\n", - "Surface training t=20667, loss=0.019079644232988358\n", - "Surface training t=20668, loss=0.022556213662028313\n", - "Surface training t=20669, loss=0.02134575042873621\n", - "Surface training t=20670, loss=0.019657387398183346\n", - "Surface training t=20671, loss=0.02775486931204796\n", - "Surface training t=20672, loss=0.032092489302158356\n", - "Surface training t=20673, loss=0.02815714105963707\n", - "Surface training t=20674, loss=0.034236736595630646\n", - "Surface training t=20675, loss=0.02873704582452774\n", - "Surface training t=20676, loss=0.03324972093105316\n", - "Surface training t=20677, loss=0.03821666445583105\n", - "Surface training t=20678, loss=0.03740701451897621\n", - "Surface training t=20679, loss=0.04113827086985111\n", - "Surface training t=20680, loss=0.028520425781607628\n", - "Surface training t=20681, loss=0.025940789841115475\n", - "Surface training t=20682, loss=0.025445235893130302\n", - "Surface training t=20683, loss=0.025660589337348938\n", - "Surface training t=20684, loss=0.025953383184969425\n", - "Surface training t=20685, loss=0.02663864754140377\n", - "Surface training t=20686, loss=0.031252057291567326\n", - "Surface training t=20687, loss=0.030456609092652798\n", - "Surface training t=20688, loss=0.02832004800438881\n", - "Surface training t=20689, loss=0.021527692675590515\n", - "Surface training t=20690, loss=0.031810387037694454\n", - "Surface training t=20691, loss=0.021085046231746674\n", - "Surface training t=20692, loss=0.026041788049042225\n", - "Surface training t=20693, loss=0.041055142879486084\n", - "Surface training t=20694, loss=0.029037308879196644\n", - "Surface training t=20695, loss=0.01862485520541668\n", - "Surface training t=20696, loss=0.02555990032851696\n", - "Surface training t=20697, loss=0.02019003964960575\n", - "Surface training t=20698, loss=0.019615447148680687\n", - "Surface training t=20699, loss=0.02552153542637825\n", - "Surface training t=20700, loss=0.01771371439099312\n", - "Surface training t=20701, loss=0.02642889227718115\n", - "Surface training t=20702, loss=0.031181510537862778\n", - "Surface training t=20703, loss=0.020067522302269936\n", - "Surface training t=20704, loss=0.02737988531589508\n", - "Surface training t=20705, loss=0.024887307547032833\n", - "Surface training t=20706, loss=0.022780545987188816\n", - "Surface training t=20707, loss=0.02237193752080202\n", - "Surface training t=20708, loss=0.025710160844027996\n", - "Surface training t=20709, 
loss=0.028540014289319515\n", - "Surface training t=20710, loss=0.02634057216346264\n", - "Surface training t=20711, loss=0.025478621944785118\n", - "Surface training t=20712, loss=0.022877161391079426\n", - "Surface training t=20713, loss=0.033913166262209415\n", - "Surface training t=20714, loss=0.030797562561929226\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=20715, loss=0.03216402232646942\n", - "Surface training t=20716, loss=0.03305526450276375\n", - "Surface training t=20717, loss=0.02928946167230606\n", - "Surface training t=20718, loss=0.018947805278003216\n", - "Surface training t=20719, loss=0.021223132498562336\n", - "Surface training t=20720, loss=0.022281911224126816\n", - "Surface training t=20721, loss=0.02253139764070511\n", - "Surface training t=20722, loss=0.021804079413414\n", - "Surface training t=20723, loss=0.01943944115191698\n", - "Surface training t=20724, loss=0.025356893427670002\n", - "Surface training t=20725, loss=0.02765277959406376\n", - "Surface training t=20726, loss=0.03483089804649353\n", - "Surface training t=20727, loss=0.032016571611166\n", - "Surface training t=20728, loss=0.036178089678287506\n", - "Surface training t=20729, loss=0.03938571177423\n", - "Surface training t=20730, loss=0.03169005922973156\n", - "Surface training t=20731, loss=0.031937780790030956\n", - "Surface training t=20732, loss=0.02484313864260912\n", - "Surface training t=20733, loss=0.027461311779916286\n", - "Surface training t=20734, loss=0.030116306617856026\n", - "Surface training t=20735, loss=0.02426969725638628\n", - "Surface training t=20736, loss=0.029675652272999287\n", - "Surface training t=20737, loss=0.031744321808218956\n", - "Surface training t=20738, loss=0.02712931390851736\n", - "Surface training t=20739, loss=0.025108080357313156\n", - "Surface training t=20740, loss=0.031450262293219566\n", - "Surface training t=20741, loss=0.03317440673708916\n", - "Surface training t=20742, loss=0.03049117885529995\n", - "Surface training t=20743, loss=0.03363278601318598\n", - "Surface training t=20744, loss=0.03726934641599655\n", - "Surface training t=20745, loss=0.042140161618590355\n", - "Surface training t=20746, loss=0.024744276888668537\n", - "Surface training t=20747, loss=0.017109776847064495\n", - "Surface training t=20748, loss=0.021464689634740353\n", - "Surface training t=20749, loss=0.021693005692213774\n", - "Surface training t=20750, loss=0.024064429104328156\n", - "Surface training t=20751, loss=0.026006446219980717\n", - "Surface training t=20752, loss=0.026043754070997238\n", - "Surface training t=20753, loss=0.020504246465861797\n", - "Surface training t=20754, loss=0.024055308662354946\n", - "Surface training t=20755, loss=0.023634405806660652\n", - "Surface training t=20756, loss=0.02715793438255787\n", - "Surface training t=20757, loss=0.021318349987268448\n", - "Surface training t=20758, loss=0.01889317436143756\n", - "Surface training t=20759, loss=0.020015298388898373\n", - "Surface training t=20760, loss=0.02934932243078947\n", - "Surface training t=20761, loss=0.03129446506500244\n", - "Surface training t=20762, loss=0.04074283875524998\n", - "Surface training t=20763, loss=0.020507211796939373\n", - "Surface training t=20764, loss=0.021213544066995382\n", - "Surface training t=20765, loss=0.022486278787255287\n", - "Surface training t=20766, loss=0.020757772959768772\n", - "Surface training t=20767, loss=0.02052378561347723\n", - "Surface training t=20768, loss=0.02162877144291997\n", - 
"Surface training t=20769, loss=0.026446864940226078\n", - "Surface training t=20770, loss=0.025641479529440403\n", - "Surface training t=20771, loss=0.027749430388212204\n", - "Surface training t=20772, loss=0.020536575466394424\n", - "Surface training t=20773, loss=0.02171298209577799\n", - "Surface training t=20774, loss=0.01831859163939953\n", - "Surface training t=20775, loss=0.021115477196872234\n", - "Surface training t=20776, loss=0.029863807372748852\n", - "Surface training t=20777, loss=0.025736669078469276\n", - "Surface training t=20778, loss=0.023724062368273735\n", - "Surface training t=20779, loss=0.018908752128481865\n", - "Surface training t=20780, loss=0.01883106492459774\n", - "Surface training t=20781, loss=0.02428403589874506\n", - "Surface training t=20782, loss=0.02471337281167507\n", - "Surface training t=20783, loss=0.030081331729888916\n", - "Surface training t=20784, loss=0.02043346967548132\n", - "Surface training t=20785, loss=0.020649641752243042\n", - "Surface training t=20786, loss=0.01991023402661085\n", - "Surface training t=20787, loss=0.021644411608576775\n", - "Surface training t=20788, loss=0.04281949624419212\n", - "Surface training t=20789, loss=0.02979943063110113\n", - "Surface training t=20790, loss=0.027449453249573708\n", - "Surface training t=20791, loss=0.02407877705991268\n", - "Surface training t=20792, loss=0.02607161272317171\n", - "Surface training t=20793, loss=0.029780450277030468\n", - "Surface training t=20794, loss=0.02871676441282034\n", - "Surface training t=20795, loss=0.024607861414551735\n", - "Surface training t=20796, loss=0.021776482462882996\n", - "Surface training t=20797, loss=0.024472219869494438\n", - "Surface training t=20798, loss=0.024466448463499546\n", - "Surface training t=20799, loss=0.02833949401974678\n", - "Surface training t=20800, loss=0.026888689026236534\n", - "Surface training t=20801, loss=0.024729772470891476\n", - "Surface training t=20802, loss=0.028692644089460373\n", - "Surface training t=20803, loss=0.035697681829333305\n", - "Surface training t=20804, loss=0.040980011224746704\n", - "Surface training t=20805, loss=0.03165839798748493\n", - "Surface training t=20806, loss=0.03451740834861994\n", - "Surface training t=20807, loss=0.028415916487574577\n", - "Surface training t=20808, loss=0.03167187329381704\n", - "Surface training t=20809, loss=0.028451007790863514\n", - "Surface training t=20810, loss=0.026135762222111225\n", - "Surface training t=20811, loss=0.045116448774933815\n", - "Surface training t=20812, loss=0.027472447603940964\n", - "Surface training t=20813, loss=0.027021522633731365\n", - "Surface training t=20814, loss=0.03253590315580368\n", - "Surface training t=20815, loss=0.03078855387866497\n", - "Surface training t=20816, loss=0.02925784420222044\n", - "Surface training t=20817, loss=0.026409713551402092\n", - "Surface training t=20818, loss=0.03032063879072666\n", - "Surface training t=20819, loss=0.025699716061353683\n", - "Surface training t=20820, loss=0.03256985079497099\n", - "Surface training t=20821, loss=0.02674282155930996\n", - "Surface training t=20822, loss=0.02520196046680212\n", - "Surface training t=20823, loss=0.03031967394053936\n", - "Surface training t=20824, loss=0.03739828243851662\n", - "Surface training t=20825, loss=0.03094286099076271\n", - "Surface training t=20826, loss=0.04089629836380482\n", - "Surface training t=20827, loss=0.03167560324072838\n", - "Surface training t=20828, loss=0.024622402153909206\n", - "Surface training t=20829, 
[... ≈1,200 deleted notebook stdout lines elided: repetitive "Surface training t=20830 … t=22033, loss=…" training-log output (loss values ranging roughly 0.012–0.065) removed from the notebook diff, along with the intervening `"name": "stdout", "output_type": "stream"` cell-boundary JSON ...]
"stream", - "text": [ - "Surface training t=22034, loss=0.020944871939718723\n", - "Surface training t=22035, loss=0.020173529628664255\n", - "Surface training t=22036, loss=0.022378754802048206\n", - "Surface training t=22037, loss=0.02766764536499977\n", - "Surface training t=22038, loss=0.019730858504772186\n", - "Surface training t=22039, loss=0.02195363398641348\n", - "Surface training t=22040, loss=0.025115694850683212\n", - "Surface training t=22041, loss=0.024355227127671242\n", - "Surface training t=22042, loss=0.03167299926280975\n", - "Surface training t=22043, loss=0.02493910025805235\n", - "Surface training t=22044, loss=0.0299476170912385\n", - "Surface training t=22045, loss=0.028612300753593445\n", - "Surface training t=22046, loss=0.033866655081510544\n", - "Surface training t=22047, loss=0.030710672959685326\n", - "Surface training t=22048, loss=0.028366507031023502\n", - "Surface training t=22049, loss=0.02780803106725216\n", - "Surface training t=22050, loss=0.024282537400722504\n", - "Surface training t=22051, loss=0.021304904483258724\n", - "Surface training t=22052, loss=0.019043284468352795\n", - "Surface training t=22053, loss=0.019506114535033703\n", - "Surface training t=22054, loss=0.019829455763101578\n", - "Surface training t=22055, loss=0.015371652320027351\n", - "Surface training t=22056, loss=0.019289330579340458\n", - "Surface training t=22057, loss=0.016033754218369722\n", - "Surface training t=22058, loss=0.026238571852445602\n", - "Surface training t=22059, loss=0.027731155045330524\n", - "Surface training t=22060, loss=0.023201516829431057\n", - "Surface training t=22061, loss=0.023094844073057175\n", - "Surface training t=22062, loss=0.025434906594455242\n", - "Surface training t=22063, loss=0.019774585030972958\n", - "Surface training t=22064, loss=0.01674804976209998\n", - "Surface training t=22065, loss=0.020491620525717735\n", - "Surface training t=22066, loss=0.017678342293947935\n", - "Surface training t=22067, loss=0.02238804567605257\n", - "Surface training t=22068, loss=0.01932976022362709\n", - "Surface training t=22069, loss=0.02300236001610756\n", - "Surface training t=22070, loss=0.027213560417294502\n", - "Surface training t=22071, loss=0.024753552861511707\n", - "Surface training t=22072, loss=0.026827488094568253\n", - "Surface training t=22073, loss=0.04142565652728081\n", - "Surface training t=22074, loss=0.030854661017656326\n", - "Surface training t=22075, loss=0.02638243045657873\n", - "Surface training t=22076, loss=0.030371437780559063\n", - "Surface training t=22077, loss=0.03329404070973396\n", - "Surface training t=22078, loss=0.02230676729232073\n", - "Surface training t=22079, loss=0.030799967236816883\n", - "Surface training t=22080, loss=0.03708258457481861\n", - "Surface training t=22081, loss=0.03143364563584328\n", - "Surface training t=22082, loss=0.027375422418117523\n", - "Surface training t=22083, loss=0.02305808011442423\n", - "Surface training t=22084, loss=0.029941482469439507\n", - "Surface training t=22085, loss=0.03313327208161354\n", - "Surface training t=22086, loss=0.02943903859704733\n", - "Surface training t=22087, loss=0.045471396297216415\n", - "Surface training t=22088, loss=0.03672424703836441\n", - "Surface training t=22089, loss=0.046415312215685844\n", - "Surface training t=22090, loss=0.033335404470562935\n", - "Surface training t=22091, loss=0.04219293221831322\n", - "Surface training t=22092, loss=0.0341014014557004\n", - "Surface training t=22093, loss=0.04039068799465895\n", - "Surface 
training t=22094, loss=0.03230410907417536\n", - "Surface training t=22095, loss=0.02546554058790207\n", - "Surface training t=22096, loss=0.02310125157237053\n", - "Surface training t=22097, loss=0.030323386192321777\n", - "Surface training t=22098, loss=0.024986449629068375\n", - "Surface training t=22099, loss=0.025408298708498478\n", - "Surface training t=22100, loss=0.02156020514667034\n", - "Surface training t=22101, loss=0.03696836903691292\n", - "Surface training t=22102, loss=0.02639601193368435\n", - "Surface training t=22103, loss=0.018873692024499178\n", - "Surface training t=22104, loss=0.03162059001624584\n", - "Surface training t=22105, loss=0.02914297580718994\n", - "Surface training t=22106, loss=0.01994136441498995\n", - "Surface training t=22107, loss=0.01805378682911396\n", - "Surface training t=22108, loss=0.01619199151173234\n", - "Surface training t=22109, loss=0.017208095639944077\n", - "Surface training t=22110, loss=0.018331391736865044\n", - "Surface training t=22111, loss=0.015499481931328773\n", - "Surface training t=22112, loss=0.017643450759351254\n", - "Surface training t=22113, loss=0.015401795040816069\n", - "Surface training t=22114, loss=0.01768196327611804\n", - "Surface training t=22115, loss=0.021138238720595837\n", - "Surface training t=22116, loss=0.020853672176599503\n", - "Surface training t=22117, loss=0.011932802386581898\n", - "Surface training t=22118, loss=0.020850143395364285\n", - "Surface training t=22119, loss=0.021626679692417383\n", - "Surface training t=22120, loss=0.02498659584671259\n", - "Surface training t=22121, loss=0.0265636146068573\n", - "Surface training t=22122, loss=0.03751029446721077\n", - "Surface training t=22123, loss=0.02427726238965988\n", - "Surface training t=22124, loss=0.03359448350965977\n", - "Surface training t=22125, loss=0.02741170395165682\n", - "Surface training t=22126, loss=0.022335371002554893\n", - "Surface training t=22127, loss=0.021823765709996223\n", - "Surface training t=22128, loss=0.023171219043433666\n", - "Surface training t=22129, loss=0.02258879877626896\n", - "Surface training t=22130, loss=0.025764700956642628\n", - "Surface training t=22131, loss=0.026804691180586815\n", - "Surface training t=22132, loss=0.0338806351646781\n", - "Surface training t=22133, loss=0.02304884511977434\n", - "Surface training t=22134, loss=0.025408009998500347\n", - "Surface training t=22135, loss=0.022382422350347042\n", - "Surface training t=22136, loss=0.030883818864822388\n", - "Surface training t=22137, loss=0.02730818372219801\n", - "Surface training t=22138, loss=0.027209005318582058\n", - "Surface training t=22139, loss=0.021846835501492023\n", - "Surface training t=22140, loss=0.02110040094703436\n", - "Surface training t=22141, loss=0.019222023896872997\n", - "Surface training t=22142, loss=0.018853568471968174\n", - "Surface training t=22143, loss=0.019844548776745796\n", - "Surface training t=22144, loss=0.021893958561122417\n", - "Surface training t=22145, loss=0.02038543112576008\n", - "Surface training t=22146, loss=0.033813174813985825\n", - "Surface training t=22147, loss=0.019955262541770935\n", - "Surface training t=22148, loss=0.023740987293422222\n", - "Surface training t=22149, loss=0.02049935609102249\n", - "Surface training t=22150, loss=0.023009211756289005\n", - "Surface training t=22151, loss=0.02101675048470497\n", - "Surface training t=22152, loss=0.023094735108315945\n", - "Surface training t=22153, loss=0.02009246777743101\n", - "Surface training t=22154, 
loss=0.02343863807618618\n", - "Surface training t=22155, loss=0.01684616692364216\n", - "Surface training t=22156, loss=0.01996735204011202\n", - "Surface training t=22157, loss=0.022059504874050617\n", - "Surface training t=22158, loss=0.023024968802928925\n", - "Surface training t=22159, loss=0.02387849520891905\n", - "Surface training t=22160, loss=0.024930210784077644\n", - "Surface training t=22161, loss=0.022398442961275578\n", - "Surface training t=22162, loss=0.017278220504522324\n", - "Surface training t=22163, loss=0.018124084919691086\n", - "Surface training t=22164, loss=0.02283111959695816\n", - "Surface training t=22165, loss=0.022378121502697468\n", - "Surface training t=22166, loss=0.022245616652071476\n", - "Surface training t=22167, loss=0.02303978055715561\n", - "Surface training t=22168, loss=0.02211393415927887\n", - "Surface training t=22169, loss=0.02765951119363308\n", - "Surface training t=22170, loss=0.03140908665955067\n", - "Surface training t=22171, loss=0.0339322155341506\n", - "Surface training t=22172, loss=0.03370491601526737\n", - "Surface training t=22173, loss=0.02417199220508337\n", - "Surface training t=22174, loss=0.024796297773718834\n", - "Surface training t=22175, loss=0.026678369380533695\n", - "Surface training t=22176, loss=0.02003865409642458\n", - "Surface training t=22177, loss=0.027308964170515537\n", - "Surface training t=22178, loss=0.020093907602131367\n", - "Surface training t=22179, loss=0.021379142999649048\n", - "Surface training t=22180, loss=0.017577914521098137\n", - "Surface training t=22181, loss=0.023236659355461597\n", - "Surface training t=22182, loss=0.0325038880109787\n", - "Surface training t=22183, loss=0.022088536992669106\n", - "Surface training t=22184, loss=0.021251355297863483\n", - "Surface training t=22185, loss=0.029987200163304806\n", - "Surface training t=22186, loss=0.020947290584445\n", - "Surface training t=22187, loss=0.02269593719393015\n", - "Surface training t=22188, loss=0.01627424033358693\n", - "Surface training t=22189, loss=0.023996470496058464\n", - "Surface training t=22190, loss=0.019044709391891956\n", - "Surface training t=22191, loss=0.022026083432137966\n", - "Surface training t=22192, loss=0.019190829247236252\n", - "Surface training t=22193, loss=0.022114960476756096\n", - "Surface training t=22194, loss=0.021957959048449993\n", - "Surface training t=22195, loss=0.02168547362089157\n", - "Surface training t=22196, loss=0.025663860142230988\n", - "Surface training t=22197, loss=0.019417785108089447\n", - "Surface training t=22198, loss=0.022152460180222988\n", - "Surface training t=22199, loss=0.024253268726170063\n", - "Surface training t=22200, loss=0.028946511447429657\n", - "Surface training t=22201, loss=0.023215548135340214\n", - "Surface training t=22202, loss=0.020150686614215374\n", - "Surface training t=22203, loss=0.023154964670538902\n", - "Surface training t=22204, loss=0.02159926388412714\n", - "Surface training t=22205, loss=0.024614552967250347\n", - "Surface training t=22206, loss=0.021034798584878445\n", - "Surface training t=22207, loss=0.0204908000305295\n", - "Surface training t=22208, loss=0.019336744211614132\n", - "Surface training t=22209, loss=0.024744918569922447\n", - "Surface training t=22210, loss=0.023544996045529842\n", - "Surface training t=22211, loss=0.021539966575801373\n", - "Surface training t=22212, loss=0.02730951551347971\n", - "Surface training t=22213, loss=0.036867326125502586\n", - "Surface training t=22214, loss=0.042712049558758736\n", - "Surface 
training t=22215, loss=0.03268701396882534\n", - "Surface training t=22216, loss=0.02591981738805771\n", - "Surface training t=22217, loss=0.031215570867061615\n", - "Surface training t=22218, loss=0.03587471880018711\n", - "Surface training t=22219, loss=0.026174183934926987\n", - "Surface training t=22220, loss=0.03374961577355862\n", - "Surface training t=22221, loss=0.02847965620458126\n", - "Surface training t=22222, loss=0.02589882630854845\n", - "Surface training t=22223, loss=0.04100225865840912\n", - "Surface training t=22224, loss=0.036050545051693916\n", - "Surface training t=22225, loss=0.041593991219997406\n", - "Surface training t=22226, loss=0.03040639776736498\n", - "Surface training t=22227, loss=0.02084320317953825\n", - "Surface training t=22228, loss=0.025520446710288525\n", - "Surface training t=22229, loss=0.018978755921125412\n", - "Surface training t=22230, loss=0.022469403222203255\n", - "Surface training t=22231, loss=0.019528496079146862\n", - "Surface training t=22232, loss=0.021427473053336143\n", - "Surface training t=22233, loss=0.02073366940021515\n", - "Surface training t=22234, loss=0.02945620473474264\n", - "Surface training t=22235, loss=0.02858730312436819\n", - "Surface training t=22236, loss=0.026251166127622128\n", - "Surface training t=22237, loss=0.03134562447667122\n", - "Surface training t=22238, loss=0.03375545423477888\n", - "Surface training t=22239, loss=0.0306667173281312\n", - "Surface training t=22240, loss=0.030639919452369213\n", - "Surface training t=22241, loss=0.03693337365984917\n", - "Surface training t=22242, loss=0.022749319672584534\n", - "Surface training t=22243, loss=0.020102476701140404\n", - "Surface training t=22244, loss=0.0241486057639122\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=22245, loss=0.026278779841959476\n", - "Surface training t=22246, loss=0.021260621026158333\n", - "Surface training t=22247, loss=0.01869295071810484\n", - "Surface training t=22248, loss=0.02079432550817728\n", - "Surface training t=22249, loss=0.032512047328054905\n", - "Surface training t=22250, loss=0.0278725391253829\n", - "Surface training t=22251, loss=0.03020638506859541\n", - "Surface training t=22252, loss=0.045377178117632866\n", - "Surface training t=22253, loss=0.02690794412046671\n", - "Surface training t=22254, loss=0.03273206762969494\n", - "Surface training t=22255, loss=0.028425457887351513\n", - "Surface training t=22256, loss=0.02570098452270031\n", - "Surface training t=22257, loss=0.03610428795218468\n", - "Surface training t=22258, loss=0.036138296127319336\n", - "Surface training t=22259, loss=0.03892001882195473\n", - "Surface training t=22260, loss=0.03497541509568691\n", - "Surface training t=22261, loss=0.034638142213225365\n", - "Surface training t=22262, loss=0.025197656825184822\n", - "Surface training t=22263, loss=0.030630631372332573\n", - "Surface training t=22264, loss=0.02601648773998022\n", - "Surface training t=22265, loss=0.02288190834224224\n", - "Surface training t=22266, loss=0.021338921040296555\n", - "Surface training t=22267, loss=0.01881693582981825\n", - "Surface training t=22268, loss=0.023011820390820503\n", - "Surface training t=22269, loss=0.015250120777636766\n", - "Surface training t=22270, loss=0.016835561953485012\n", - "Surface training t=22271, loss=0.01897760108113289\n", - "Surface training t=22272, loss=0.017164526507258415\n", - "Surface training t=22273, loss=0.02074800617992878\n", - "Surface training t=22274, 
loss=0.020999412052333355\n", - "Surface training t=22275, loss=0.023724243976175785\n", - "Surface training t=22276, loss=0.018390589393675327\n", - "Surface training t=22277, loss=0.019276902079582214\n", - "Surface training t=22278, loss=0.017328675836324692\n", - "Surface training t=22279, loss=0.018512113019824028\n", - "Surface training t=22280, loss=0.019214823842048645\n", - "Surface training t=22281, loss=0.020094089210033417\n", - "Surface training t=22282, loss=0.018871144391596317\n", - "Surface training t=22283, loss=0.019732595421373844\n", - "Surface training t=22284, loss=0.018809632398188114\n", - "Surface training t=22285, loss=0.031354802660644054\n", - "Surface training t=22286, loss=0.03316064924001694\n", - "Surface training t=22287, loss=0.02742354478687048\n", - "Surface training t=22288, loss=0.02808411791920662\n", - "Surface training t=22289, loss=0.02685677446424961\n", - "Surface training t=22290, loss=0.021733082830905914\n", - "Surface training t=22291, loss=0.018567136954516172\n", - "Surface training t=22292, loss=0.016592197120189667\n", - "Surface training t=22293, loss=0.016131756827235222\n", - "Surface training t=22294, loss=0.01800608914345503\n", - "Surface training t=22295, loss=0.024792302399873734\n", - "Surface training t=22296, loss=0.028383287601172924\n", - "Surface training t=22297, loss=0.04605773836374283\n", - "Surface training t=22298, loss=0.03658100590109825\n", - "Surface training t=22299, loss=0.034529659897089005\n", - "Surface training t=22300, loss=0.0255509028211236\n", - "Surface training t=22301, loss=0.022126412019133568\n", - "Surface training t=22302, loss=0.02400073315948248\n", - "Surface training t=22303, loss=0.023563257418572903\n", - "Surface training t=22304, loss=0.023676727898418903\n", - "Surface training t=22305, loss=0.028400693088769913\n", - "Surface training t=22306, loss=0.0326172411441803\n", - "Surface training t=22307, loss=0.034476340748369694\n", - "Surface training t=22308, loss=0.03180182818323374\n", - "Surface training t=22309, loss=0.03561925143003464\n", - "Surface training t=22310, loss=0.03407362103462219\n", - "Surface training t=22311, loss=0.03409909829497337\n", - "Surface training t=22312, loss=0.028757828287780285\n", - "Surface training t=22313, loss=0.041042692959308624\n", - "Surface training t=22314, loss=0.034267423674464226\n", - "Surface training t=22315, loss=0.031907690688967705\n", - "Surface training t=22316, loss=0.0298880310729146\n", - "Surface training t=22317, loss=0.038090264424681664\n", - "Surface training t=22318, loss=0.028200636617839336\n", - "Surface training t=22319, loss=0.032078216783702374\n", - "Surface training t=22320, loss=0.030085205100476742\n", - "Surface training t=22321, loss=0.025295785628259182\n", - "Surface training t=22322, loss=0.02848039288073778\n", - "Surface training t=22323, loss=0.027363763190805912\n", - "Surface training t=22324, loss=0.022056668996810913\n", - "Surface training t=22325, loss=0.021558872424066067\n", - "Surface training t=22326, loss=0.028770262375473976\n", - "Surface training t=22327, loss=0.02459173183888197\n", - "Surface training t=22328, loss=0.03159311693161726\n", - "Surface training t=22329, loss=0.03147165942937136\n", - "Surface training t=22330, loss=0.026995855383574963\n", - "Surface training t=22331, loss=0.028410110622644424\n", - "Surface training t=22332, loss=0.03256215061992407\n", - "Surface training t=22333, loss=0.023856007494032383\n", - "Surface training t=22334, loss=0.024925402365624905\n", - 
"Surface training t=22335, loss=0.035433707758784294\n", - "Surface training t=22336, loss=0.03337001521140337\n", - "Surface training t=22337, loss=0.03233751840889454\n", - "Surface training t=22338, loss=0.0284184068441391\n", - "Surface training t=22339, loss=0.026835890486836433\n", - "Surface training t=22340, loss=0.04230893403291702\n", - "Surface training t=22341, loss=0.03264816477894783\n", - "Surface training t=22342, loss=0.03680145274847746\n", - "Surface training t=22343, loss=0.028649368323385715\n", - "Surface training t=22344, loss=0.0313374875113368\n", - "Surface training t=22345, loss=0.04392137564718723\n", - "Surface training t=22346, loss=0.028400946408510208\n", - "Surface training t=22347, loss=0.026291591115295887\n", - "Surface training t=22348, loss=0.023524442687630653\n", - "Surface training t=22349, loss=0.023875592276453972\n", - "Surface training t=22350, loss=0.03479215409606695\n", - "Surface training t=22351, loss=0.03074601199477911\n", - "Surface training t=22352, loss=0.03047350514680147\n", - "Surface training t=22353, loss=0.031909787096083164\n", - "Surface training t=22354, loss=0.026208672672510147\n", - "Surface training t=22355, loss=0.024388394318521023\n", - "Surface training t=22356, loss=0.019878643564879894\n", - "Surface training t=22357, loss=0.01938471570611\n", - "Surface training t=22358, loss=0.01684977300465107\n", - "Surface training t=22359, loss=0.02374393492937088\n", - "Surface training t=22360, loss=0.026234904304146767\n", - "Surface training t=22361, loss=0.027574289590120316\n", - "Surface training t=22362, loss=0.025199316442012787\n", - "Surface training t=22363, loss=0.02131143305450678\n", - "Surface training t=22364, loss=0.020318150520324707\n", - "Surface training t=22365, loss=0.02744324877858162\n", - "Surface training t=22366, loss=0.017327317036688328\n", - "Surface training t=22367, loss=0.02404334582388401\n", - "Surface training t=22368, loss=0.027294624596834183\n", - "Surface training t=22369, loss=0.03810802847146988\n", - "Surface training t=22370, loss=0.037460289895534515\n", - "Surface training t=22371, loss=0.029695109464228153\n", - "Surface training t=22372, loss=0.028750259429216385\n", - "Surface training t=22373, loss=0.026685421355068684\n", - "Surface training t=22374, loss=0.02233980130404234\n", - "Surface training t=22375, loss=0.025850525125861168\n", - "Surface training t=22376, loss=0.02579017635434866\n", - "Surface training t=22377, loss=0.026741462759673595\n", - "Surface training t=22378, loss=0.02880065143108368\n", - "Surface training t=22379, loss=0.039933785796165466\n", - "Surface training t=22380, loss=0.030540059320628643\n", - "Surface training t=22381, loss=0.03586895391345024\n", - "Surface training t=22382, loss=0.03244900889694691\n", - "Surface training t=22383, loss=0.03353030048310757\n", - "Surface training t=22384, loss=0.03710535168647766\n", - "Surface training t=22385, loss=0.030046092346310616\n", - "Surface training t=22386, loss=0.037510182708501816\n", - "Surface training t=22387, loss=0.02085623424500227\n", - "Surface training t=22388, loss=0.022931253537535667\n", - "Surface training t=22389, loss=0.030009374022483826\n", - "Surface training t=22390, loss=0.020036606118083\n", - "Surface training t=22391, loss=0.024912971071898937\n", - "Surface training t=22392, loss=0.02422327548265457\n", - "Surface training t=22393, loss=0.01770987268537283\n", - "Surface training t=22394, loss=0.023765080608427525\n", - "Surface training t=22395, 
loss=0.019720707088708878\n", - "Surface training t=22396, loss=0.02430999930948019\n", - "Surface training t=22397, loss=0.02241578698158264\n", - "Surface training t=22398, loss=0.02152479998767376\n", - "Surface training t=22399, loss=0.021147051826119423\n", - "Surface training t=22400, loss=0.028180494904518127\n", - "Surface training t=22401, loss=0.0242965966463089\n", - "Surface training t=22402, loss=0.024056500755250454\n", - "Surface training t=22403, loss=0.02547011710703373\n", - "Surface training t=22404, loss=0.022685031406581402\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=22405, loss=0.02963394671678543\n", - "Surface training t=22406, loss=0.02367174904793501\n", - "Surface training t=22407, loss=0.02717871218919754\n", - "Surface training t=22408, loss=0.025893285870552063\n", - "Surface training t=22409, loss=0.024837389588356018\n", - "Surface training t=22410, loss=0.025295970030128956\n", - "Surface training t=22411, loss=0.026368318125605583\n", - "Surface training t=22412, loss=0.029950976371765137\n", - "Surface training t=22413, loss=0.02586204931139946\n", - "Surface training t=22414, loss=0.03473497927188873\n", - "Surface training t=22415, loss=0.04260912165045738\n", - "Surface training t=22416, loss=0.03634548094123602\n", - "Surface training t=22417, loss=0.03435340337455273\n", - "Surface training t=22418, loss=0.03650118876248598\n", - "Surface training t=22419, loss=0.04053688794374466\n", - "Surface training t=22420, loss=0.03888477757573128\n", - "Surface training t=22421, loss=0.03104351833462715\n", - "Surface training t=22422, loss=0.02507508173584938\n", - "Surface training t=22423, loss=0.026498226448893547\n", - "Surface training t=22424, loss=0.026632492430508137\n", - "Surface training t=22425, loss=0.028180853463709354\n", - "Surface training t=22426, loss=0.03136356174945831\n", - "Surface training t=22427, loss=0.024496243335306644\n", - "Surface training t=22428, loss=0.02478130953386426\n", - "Surface training t=22429, loss=0.029169931076467037\n", - "Surface training t=22430, loss=0.02551737893372774\n", - "Surface training t=22431, loss=0.028697348199784756\n", - "Surface training t=22432, loss=0.027673236094415188\n", - "Surface training t=22433, loss=0.01686945930123329\n", - "Surface training t=22434, loss=0.01822714600712061\n", - "Surface training t=22435, loss=0.022807314060628414\n", - "Surface training t=22436, loss=0.03850308433175087\n", - "Surface training t=22437, loss=0.027933740057051182\n", - "Surface training t=22438, loss=0.03286487236618996\n", - "Surface training t=22439, loss=0.026392746716737747\n", - "Surface training t=22440, loss=0.029054854065179825\n", - "Surface training t=22441, loss=0.02636373322457075\n", - "Surface training t=22442, loss=0.02748740464448929\n", - "Surface training t=22443, loss=0.02688795141875744\n", - "Surface training t=22444, loss=0.02156748529523611\n", - "Surface training t=22445, loss=0.02390687633305788\n", - "Surface training t=22446, loss=0.0242245988920331\n", - "Surface training t=22447, loss=0.029829245060682297\n", - "Surface training t=22448, loss=0.030339475721120834\n", - "Surface training t=22449, loss=0.021229715086519718\n", - "Surface training t=22450, loss=0.027606758289039135\n", - "Surface training t=22451, loss=0.02143212128430605\n", - "Surface training t=22452, loss=0.0372482780367136\n", - "Surface training t=22453, loss=0.02156259771436453\n", - "Surface training t=22454, loss=0.024728644639253616\n", - 
"Surface training t=22455, loss=0.021527145989239216\n", - "Surface training t=22456, loss=0.02042229939252138\n", - "Surface training t=22457, loss=0.02426253817975521\n", - "Surface training t=22458, loss=0.023559301160275936\n", - "Surface training t=22459, loss=0.019799519795924425\n", - "Surface training t=22460, loss=0.021428795531392097\n", - "Surface training t=22461, loss=0.014930030331015587\n", - "Surface training t=22462, loss=0.01578678237274289\n", - "Surface training t=22463, loss=0.019185824319720268\n", - "Surface training t=22464, loss=0.023310906253755093\n", - "Surface training t=22465, loss=0.025945018976926804\n", - "Surface training t=22466, loss=0.020961718633770943\n", - "Surface training t=22467, loss=0.02259136736392975\n", - "Surface training t=22468, loss=0.022563009522855282\n", - "Surface training t=22469, loss=0.01680830167606473\n", - "Surface training t=22470, loss=0.0195852043107152\n", - "Surface training t=22471, loss=0.023390863090753555\n", - "Surface training t=22472, loss=0.023253142833709717\n", - "Surface training t=22473, loss=0.02328452654182911\n", - "Surface training t=22474, loss=0.023072444833815098\n", - "Surface training t=22475, loss=0.01710372418165207\n", - "Surface training t=22476, loss=0.020488837733864784\n", - "Surface training t=22477, loss=0.025842148810625076\n", - "Surface training t=22478, loss=0.016186955850571394\n", - "Surface training t=22479, loss=0.02019700314849615\n", - "Surface training t=22480, loss=0.017098655458539724\n", - "Surface training t=22481, loss=0.0193854421377182\n", - "Surface training t=22482, loss=0.023200816474854946\n", - "Surface training t=22483, loss=0.022410244680941105\n", - "Surface training t=22484, loss=0.014926822856068611\n", - "Surface training t=22485, loss=0.021687575615942478\n", - "Surface training t=22486, loss=0.02695622108876705\n", - "Surface training t=22487, loss=0.030409245751798153\n", - "Surface training t=22488, loss=0.02975583355873823\n", - "Surface training t=22489, loss=0.026109528727829456\n", - "Surface training t=22490, loss=0.02079428918659687\n", - "Surface training t=22491, loss=0.021335287019610405\n", - "Surface training t=22492, loss=0.030683537013828754\n", - "Surface training t=22493, loss=0.035587869584560394\n", - "Surface training t=22494, loss=0.05536830425262451\n", - "Surface training t=22495, loss=0.04124355874955654\n", - "Surface training t=22496, loss=0.03499554097652435\n", - "Surface training t=22497, loss=0.029681839048862457\n", - "Surface training t=22498, loss=0.030182842165231705\n", - "Surface training t=22499, loss=0.02875596471130848\n", - "Surface training t=22500, loss=0.027032921090722084\n", - "Surface training t=22501, loss=0.03098737634718418\n", - "Surface training t=22502, loss=0.027059320360422134\n", - "Surface training t=22503, loss=0.031351691111922264\n", - "Surface training t=22504, loss=0.03782500699162483\n", - "Surface training t=22505, loss=0.026743371970951557\n", - "Surface training t=22506, loss=0.030323498882353306\n", - "Surface training t=22507, loss=0.0309979896992445\n", - "Surface training t=22508, loss=0.028120623901486397\n", - "Surface training t=22509, loss=0.04921649768948555\n", - "Surface training t=22510, loss=0.03743505850434303\n", - "Surface training t=22511, loss=0.03649340011179447\n", - "Surface training t=22512, loss=0.03608825337141752\n", - "Surface training t=22513, loss=0.039776913821697235\n", - "Surface training t=22514, loss=0.027316215448081493\n", - "Surface training t=22515, 
loss=0.02070888876914978\n", - "Surface training t=22516, loss=0.016083240043371916\n", - "Surface training t=22517, loss=0.02186404913663864\n", - "Surface training t=22518, loss=0.015328405890613794\n", - "Surface training t=22519, loss=0.011747962795197964\n", - "Surface training t=22520, loss=0.022454104386270046\n", - "Surface training t=22521, loss=0.018579737283289433\n", - "Surface training t=22522, loss=0.02212690655142069\n", - "Surface training t=22523, loss=0.021591821685433388\n", - "Surface training t=22524, loss=0.03334908187389374\n", - "Surface training t=22525, loss=0.028987967874854803\n", - "Surface training t=22526, loss=0.03260847460478544\n", - "Surface training t=22527, loss=0.05367981269955635\n", - "Surface training t=22528, loss=0.038245758041739464\n", - "Surface training t=22529, loss=0.0366282369941473\n", - "Surface training t=22530, loss=0.031210916116833687\n", - "Surface training t=22531, loss=0.05280222184956074\n", - "Surface training t=22532, loss=0.03219612315297127\n", - "Surface training t=22533, loss=0.032162873074412346\n", - "Surface training t=22534, loss=0.025090080685913563\n", - "Surface training t=22535, loss=0.028018981218338013\n", - "Surface training t=22536, loss=0.031800538301467896\n", - "Surface training t=22537, loss=0.020368196070194244\n", - "Surface training t=22538, loss=0.020695547573268414\n", - "Surface training t=22539, loss=0.020646777004003525\n", - "Surface training t=22540, loss=0.01940792566165328\n", - "Surface training t=22541, loss=0.019631068222224712\n", - "Surface training t=22542, loss=0.01556614926084876\n", - "Surface training t=22543, loss=0.017262442037463188\n", - "Surface training t=22544, loss=0.019510905258357525\n", - "Surface training t=22545, loss=0.018805847503244877\n", - "Surface training t=22546, loss=0.018046190030872822\n", - "Surface training t=22547, loss=0.02282961830496788\n", - "Surface training t=22548, loss=0.03449215553700924\n", - "Surface training t=22549, loss=0.03525449335575104\n", - "Surface training t=22550, loss=0.026605715043842793\n", - "Surface training t=22551, loss=0.03178435005247593\n", - "Surface training t=22552, loss=0.030425832606852055\n", - "Surface training t=22553, loss=0.032142700627446175\n", - "Surface training t=22554, loss=0.04742267727851868\n", - "Surface training t=22555, loss=0.030612755566835403\n", - "Surface training t=22556, loss=0.029110086150467396\n", - "Surface training t=22557, loss=0.03386676497757435\n", - "Surface training t=22558, loss=0.05132400617003441\n", - "Surface training t=22559, loss=0.03663117624819279\n", - "Surface training t=22560, loss=0.029171663336455822\n", - "Surface training t=22561, loss=0.025369973853230476\n", - "Surface training t=22562, loss=0.024290475994348526\n", - "Surface training t=22563, loss=0.03368986025452614\n", - "Surface training t=22564, loss=0.023788686841726303\n", - "Surface training t=22565, loss=0.025069057941436768\n", - "Surface training t=22566, loss=0.030302799306809902\n", - "Surface training t=22567, loss=0.026182997971773148\n", - "Surface training t=22568, loss=0.02197780553251505\n", - "Surface training t=22569, loss=0.027866963297128677\n", - "Surface training t=22570, loss=0.03427165001630783\n", - "Surface training t=22571, loss=0.027193758636713028\n", - "Surface training t=22572, loss=0.021670174784958363\n", - "Surface training t=22573, loss=0.03249570354819298\n", - "Surface training t=22574, loss=0.02565052919089794\n", - "Surface training t=22575, loss=0.023767651990056038\n", - 
"Surface training t=22576, loss=0.026515514589846134\n", - "Surface training t=22577, loss=0.02232396323233843\n", - "Surface training t=22578, loss=0.02621052134782076\n", - "Surface training t=22579, loss=0.025471731089055538\n", - "Surface training t=22580, loss=0.04123345576226711\n", - "Surface training t=22581, loss=0.027751334942877293\n", - "Surface training t=22582, loss=0.0273220157250762\n", - "Surface training t=22583, loss=0.03349333722144365\n", - "Surface training t=22584, loss=0.03731625899672508\n", - "Surface training t=22585, loss=0.04222276248037815\n", - "Surface training t=22586, loss=0.03148658014833927\n", - "Surface training t=22587, loss=0.03067880868911743\n", - "Surface training t=22588, loss=0.046522125601768494\n", - "Surface training t=22589, loss=0.042879618704319\n", - "Surface training t=22590, loss=0.04045167565345764\n", - "Surface training t=22591, loss=0.033219615928828716\n", - "Surface training t=22592, loss=0.04223962686955929\n", - "Surface training t=22593, loss=0.03544230107218027\n", - "Surface training t=22594, loss=0.04119467921555042\n", - "Surface training t=22595, loss=0.05192556045949459\n", - "Surface training t=22596, loss=0.02986164763569832\n", - "Surface training t=22597, loss=0.03187308367341757\n", - "Surface training t=22598, loss=0.03662859182804823\n", - "Surface training t=22599, loss=0.04307898320257664\n", - "Surface training t=22600, loss=0.032034192234277725\n", - "Surface training t=22601, loss=0.024943213909864426\n", - "Surface training t=22602, loss=0.02540867030620575\n", - "Surface training t=22603, loss=0.02805788442492485\n", - "Surface training t=22604, loss=0.02885811123996973\n", - "Surface training t=22605, loss=0.030113838613033295\n", - "Surface training t=22606, loss=0.03230107203125954\n", - "Surface training t=22607, loss=0.022217484191060066\n", - "Surface training t=22608, loss=0.01808304525911808\n", - "Surface training t=22609, loss=0.017073259688913822\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=22610, loss=0.02268418576568365\n", - "Surface training t=22611, loss=0.027506045997142792\n", - "Surface training t=22612, loss=0.029001709073781967\n", - "Surface training t=22613, loss=0.02414606511592865\n", - "Surface training t=22614, loss=0.028123469091951847\n", - "Surface training t=22615, loss=0.030154350213706493\n", - "Surface training t=22616, loss=0.032515715807676315\n", - "Surface training t=22617, loss=0.034979457035660744\n", - "Surface training t=22618, loss=0.03145505581051111\n", - "Surface training t=22619, loss=0.030213323421776295\n", - "Surface training t=22620, loss=0.03691496979445219\n", - "Surface training t=22621, loss=0.02421264350414276\n", - "Surface training t=22622, loss=0.03783976845443249\n", - "Surface training t=22623, loss=0.02596462331712246\n", - "Surface training t=22624, loss=0.02231818437576294\n", - "Surface training t=22625, loss=0.03128228150308132\n", - "Surface training t=22626, loss=0.02767948340624571\n", - "Surface training t=22627, loss=0.03309761732816696\n", - "Surface training t=22628, loss=0.030662326142191887\n", - "Surface training t=22629, loss=0.027777470648288727\n", - "Surface training t=22630, loss=0.0323594119399786\n", - "Surface training t=22631, loss=0.025098771788179874\n", - "Surface training t=22632, loss=0.024432888254523277\n", - "Surface training t=22633, loss=0.030737271532416344\n", - "Surface training t=22634, loss=0.025444273836910725\n", - "Surface training t=22635, 
loss=0.0270999763160944\n", - "Surface training t=22636, loss=0.027676408179104328\n", - "Surface training t=22637, loss=0.02188208047300577\n", - "Surface training t=22638, loss=0.028870214708149433\n", - "Surface training t=22639, loss=0.03440001606941223\n", - "Surface training t=22640, loss=0.027792003005743027\n", - "Surface training t=22641, loss=0.02957082074135542\n", - "Surface training t=22642, loss=0.023207458667457104\n", - "Surface training t=22643, loss=0.026225022040307522\n", - "Surface training t=22644, loss=0.021947388537228107\n", - "Surface training t=22645, loss=0.020650923252105713\n", - "Surface training t=22646, loss=0.02872251160442829\n", - "Surface training t=22647, loss=0.024686299264431\n", - "Surface training t=22648, loss=0.025622498244047165\n", - "Surface training t=22649, loss=0.026023010723292828\n", - "Surface training t=22650, loss=0.019618410617113113\n", - "Surface training t=22651, loss=0.028501160442829132\n", - "Surface training t=22652, loss=0.021101408638060093\n", - "Surface training t=22653, loss=0.028494305908679962\n", - "Surface training t=22654, loss=0.022516383789479733\n", - "Surface training t=22655, loss=0.024720024317502975\n", - "Surface training t=22656, loss=0.021530586294829845\n", - "Surface training t=22657, loss=0.0331551693379879\n", - "Surface training t=22658, loss=0.026059724390506744\n", - "Surface training t=22659, loss=0.02595922164618969\n", - "Surface training t=22660, loss=0.032407136633992195\n", - "Surface training t=22661, loss=0.028573316521942616\n", - "Surface training t=22662, loss=0.02866237796843052\n", - "Surface training t=22663, loss=0.02410672791302204\n", - "Surface training t=22664, loss=0.03769958205521107\n", - "Surface training t=22665, loss=0.031050704419612885\n", - "Surface training t=22666, loss=0.028029541485011578\n", - "Surface training t=22667, loss=0.03351046796888113\n", - "Surface training t=22668, loss=0.04350098967552185\n", - "Surface training t=22669, loss=0.029516124166548252\n", - "Surface training t=22670, loss=0.032224384136497974\n", - "Surface training t=22671, loss=0.04553176090121269\n", - "Surface training t=22672, loss=0.038797320798039436\n", - "Surface training t=22673, loss=0.04086120147258043\n", - "Surface training t=22674, loss=0.04789944738149643\n", - "Surface training t=22675, loss=0.05023886449635029\n", - "Surface training t=22676, loss=0.03627696633338928\n", - "Surface training t=22677, loss=0.030760856345295906\n", - "Surface training t=22678, loss=0.03629159368574619\n", - "Surface training t=22679, loss=0.026313778944313526\n", - "Surface training t=22680, loss=0.03476736880838871\n", - "Surface training t=22681, loss=0.03231590986251831\n", - "Surface training t=22682, loss=0.033345622941851616\n", - "Surface training t=22683, loss=0.0527243297547102\n", - "Surface training t=22684, loss=0.04067177698016167\n", - "Surface training t=22685, loss=0.03981238044798374\n", - "Surface training t=22686, loss=0.043043846264481544\n", - "Surface training t=22687, loss=0.03723311610519886\n", - "Surface training t=22688, loss=0.02984411921352148\n", - "Surface training t=22689, loss=0.03945373743772507\n", - "Surface training t=22690, loss=0.032090007327497005\n", - "Surface training t=22691, loss=0.05196047201752663\n", - "Surface training t=22692, loss=0.04074695706367493\n", - "Surface training t=22693, loss=0.04730236530303955\n", - "Surface training t=22694, loss=0.04233673959970474\n", - "Surface training t=22695, loss=0.038020421750843525\n", - "Surface training 
t=22696, loss=0.05046873912215233\n", - "Surface training t=22697, loss=0.04702611453831196\n", - "Surface training t=22698, loss=0.03406370431184769\n", - "Surface training t=22699, loss=0.0483661163598299\n", - "Surface training t=22700, loss=0.03575569670647383\n", - "Surface training t=22701, loss=0.04069233871996403\n", - "Surface training t=22702, loss=0.03568683844059706\n", - "Surface training t=22703, loss=0.03764335438609123\n", - "Surface training t=22704, loss=0.02597129438072443\n", - "Surface training t=22705, loss=0.030869370326399803\n", - "Surface training t=22706, loss=0.032956818118691444\n", - "Surface training t=22707, loss=0.027071951888501644\n", - "Surface training t=22708, loss=0.031225244514644146\n", - "Surface training t=22709, loss=0.027815306559205055\n", - "Surface training t=22710, loss=0.02934231236577034\n", - "Surface training t=22711, loss=0.03382481634616852\n", - "Surface training t=22712, loss=0.02671628911048174\n", - "Surface training t=22713, loss=0.020816036500036716\n", - "Surface training t=22714, loss=0.02402295172214508\n", - "Surface training t=22715, loss=0.020405868999660015\n", - "Surface training t=22716, loss=0.020320948213338852\n", - "Surface training t=22717, loss=0.01752736046910286\n", - "Surface training t=22718, loss=0.020582537166774273\n", - "Surface training t=22719, loss=0.016490054316818714\n", - "Surface training t=22720, loss=0.01806231401860714\n", - "Surface training t=22721, loss=0.01999809592962265\n", - "Surface training t=22722, loss=0.015601781196892262\n", - "Surface training t=22723, loss=0.026857743971049786\n", - "Surface training t=22724, loss=0.018219638615846634\n", - "Surface training t=22725, loss=0.03455127589404583\n", - "Surface training t=22726, loss=0.037755267694592476\n", - "Surface training t=22727, loss=0.035737511701881886\n", - "Surface training t=22728, loss=0.042128621600568295\n", - "Surface training t=22729, loss=0.058706894516944885\n", - "Surface training t=22730, loss=0.0358202513307333\n", - "Surface training t=22731, loss=0.043576059862971306\n", - "Surface training t=22732, loss=0.046234844252467155\n", - "Surface training t=22733, loss=0.037175887264311314\n", - "Surface training t=22734, loss=0.04732632450759411\n", - "Surface training t=22735, loss=0.04156533069908619\n", - "Surface training t=22736, loss=0.027387382462620735\n", - "Surface training t=22737, loss=0.029035072773694992\n", - "Surface training t=22738, loss=0.030230043455958366\n", - "Surface training t=22739, loss=0.04118568263947964\n", - "Surface training t=22740, loss=0.02630016952753067\n", - "Surface training t=22741, loss=0.028805255889892578\n", - "Surface training t=22742, loss=0.02847798727452755\n", - "Surface training t=22743, loss=0.026891501620411873\n", - "Surface training t=22744, loss=0.02945962455123663\n", - "Surface training t=22745, loss=0.028995591215789318\n", - "Surface training t=22746, loss=0.025059456937015057\n", - "Surface training t=22747, loss=0.034057460725307465\n", - "Surface training t=22748, loss=0.0337783582508564\n", - "Surface training t=22749, loss=0.022547323256731033\n", - "Surface training t=22750, loss=0.018946140073239803\n", - "Surface training t=22751, loss=0.02666857372969389\n", - "Surface training t=22752, loss=0.0222040805965662\n", - "Surface training t=22753, loss=0.014337572269141674\n", - "Surface training t=22754, loss=0.018798284232616425\n", - "Surface training t=22755, loss=0.018492942675948143\n", - "Surface training t=22756, loss=0.022746117785573006\n", - 
"Surface training t=22757, loss=0.017784709110856056\n", - "Surface training t=22758, loss=0.01814162451773882\n", - "Surface training t=22759, loss=0.01777857542037964\n", - "Surface training t=22760, loss=0.017712516710162163\n", - "Surface training t=22761, loss=0.017106642480939627\n", - "Surface training t=22762, loss=0.017916414886713028\n", - "Surface training t=22763, loss=0.016635803505778313\n", - "Surface training t=22764, loss=0.01975026074796915\n", - "Surface training t=22765, loss=0.017845874652266502\n", - "Surface training t=22766, loss=0.01714298687875271\n", - "Surface training t=22767, loss=0.01799296960234642\n", - "Surface training t=22768, loss=0.019885637797415257\n", - "Surface training t=22769, loss=0.020983771421015263\n", - "Surface training t=22770, loss=0.02227356843650341\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=22771, loss=0.026696047745645046\n", - "Surface training t=22772, loss=0.02399462927132845\n", - "Surface training t=22773, loss=0.022782555781304836\n", - "Surface training t=22774, loss=0.024708754383027554\n", - "Surface training t=22775, loss=0.020033431239426136\n", - "Surface training t=22776, loss=0.018115611746907234\n", - "Surface training t=22777, loss=0.01746706198900938\n", - "Surface training t=22778, loss=0.01717735454440117\n", - "Surface training t=22779, loss=0.026347951032221317\n", - "Surface training t=22780, loss=0.0258311303332448\n", - "Surface training t=22781, loss=0.024952120147645473\n", - "Surface training t=22782, loss=0.024091510102152824\n", - "Surface training t=22783, loss=0.027070796117186546\n", - "Surface training t=22784, loss=0.023986770771443844\n", - "Surface training t=22785, loss=0.020103059709072113\n", - "Surface training t=22786, loss=0.02077085990458727\n", - "Surface training t=22787, loss=0.01901121996343136\n", - "Surface training t=22788, loss=0.015941699035465717\n", - "Surface training t=22789, loss=0.017569522373378277\n", - "Surface training t=22790, loss=0.013740325346589088\n", - "Surface training t=22791, loss=0.017816306091845036\n", - "Surface training t=22792, loss=0.025404788553714752\n", - "Surface training t=22793, loss=0.024193312041461468\n", - "Surface training t=22794, loss=0.028833742253482342\n", - "Surface training t=22795, loss=0.02327269408851862\n", - "Surface training t=22796, loss=0.018940377980470657\n", - "Surface training t=22797, loss=0.02307505626231432\n", - "Surface training t=22798, loss=0.01800351170822978\n", - "Surface training t=22799, loss=0.019864829257130623\n", - "Surface training t=22800, loss=0.01855380181223154\n", - "Surface training t=22801, loss=0.025690915063023567\n", - "Surface training t=22802, loss=0.028676125220954418\n", - "Surface training t=22803, loss=0.028295581229031086\n", - "Surface training t=22804, loss=0.015737399458885193\n", - "Surface training t=22805, loss=0.02212607115507126\n", - "Surface training t=22806, loss=0.020166666246950626\n", - "Surface training t=22807, loss=0.016704311594367027\n", - "Surface training t=22808, loss=0.02370729949325323\n", - "Surface training t=22809, loss=0.029651016928255558\n", - "Surface training t=22810, loss=0.028738588094711304\n", - "Surface training t=22811, loss=0.02877451665699482\n", - "Surface training t=22812, loss=0.028801207430660725\n", - "Surface training t=22813, loss=0.029641144908964634\n", - "Surface training t=22814, loss=0.030347133055329323\n", - "Surface training t=22815, loss=0.02399691753089428\n", - "Surface training 
t=22816, loss=0.023651085793972015\n", - "Surface training t=22817, loss=0.02783382497727871\n", - "Surface training t=22818, loss=0.03621098771691322\n", - "Surface training t=22819, loss=0.03279465530067682\n", - "Surface training t=22820, loss=0.03540278039872646\n", - "Surface training t=22821, loss=0.031537264585494995\n", - "Surface training t=22822, loss=0.03291630558669567\n", - "Surface training t=22823, loss=0.026874669827520847\n", - "Surface training t=22824, loss=0.022634195163846016\n", - "Surface training t=22825, loss=0.026387177407741547\n", - "Surface training t=22826, loss=0.02030871156603098\n", - "Surface training t=22827, loss=0.015954923816025257\n", - "Surface training t=22828, loss=0.01920056715607643\n", - "Surface training t=22829, loss=0.015832615550607443\n", - "Surface training t=22830, loss=0.014656399376690388\n", - "Surface training t=22831, loss=0.02047786768525839\n", - "Surface training t=22832, loss=0.01631587091833353\n", - "Surface training t=22833, loss=0.0203029727563262\n", - "Surface training t=22834, loss=0.02003179956227541\n", - "Surface training t=22835, loss=0.02250771503895521\n", - "Surface training t=22836, loss=0.024771008640527725\n", - "Surface training t=22837, loss=0.03502988442778587\n", - "Surface training t=22838, loss=0.03225509636104107\n", - "Surface training t=22839, loss=0.03281544055789709\n", - "Surface training t=22840, loss=0.03217726945877075\n", - "Surface training t=22841, loss=0.027700007893145084\n", - "Surface training t=22842, loss=0.02735261619091034\n", - "Surface training t=22843, loss=0.029034790582954884\n", - "Surface training t=22844, loss=0.027779879048466682\n", - "Surface training t=22845, loss=0.020009329542517662\n", - "Surface training t=22846, loss=0.028097398579120636\n", - "Surface training t=22847, loss=0.02695667650550604\n", - "Surface training t=22848, loss=0.02882428001612425\n", - "Surface training t=22849, loss=0.034248560667037964\n", - "Surface training t=22850, loss=0.02788608707487583\n", - "Surface training t=22851, loss=0.02627443615347147\n", - "Surface training t=22852, loss=0.020896032452583313\n", - "Surface training t=22853, loss=0.021562006324529648\n", - "Surface training t=22854, loss=0.018840277567505836\n", - "Surface training t=22855, loss=0.02192068099975586\n", - "Surface training t=22856, loss=0.016869695857167244\n", - "Surface training t=22857, loss=0.02001605648547411\n", - "Surface training t=22858, loss=0.015060756355524063\n", - "Surface training t=22859, loss=0.020661397837102413\n", - "Surface training t=22860, loss=0.020682898350059986\n", - "Surface training t=22861, loss=0.01675664260983467\n", - "Surface training t=22862, loss=0.01889043115079403\n", - "Surface training t=22863, loss=0.021259384229779243\n", - "Surface training t=22864, loss=0.016446887515485287\n", - "Surface training t=22865, loss=0.022378585301339626\n", - "Surface training t=22866, loss=0.02007357869297266\n", - "Surface training t=22867, loss=0.017078464850783348\n", - "Surface training t=22868, loss=0.01753619872033596\n", - "Surface training t=22869, loss=0.01649693213403225\n", - "Surface training t=22870, loss=0.019948464818298817\n", - "Surface training t=22871, loss=0.014325946103781462\n", - "Surface training t=22872, loss=0.019319025799632072\n", - "Surface training t=22873, loss=0.020699551329016685\n", - "Surface training t=22874, loss=0.015946295112371445\n", - "Surface training t=22875, loss=0.02235835138708353\n", - "Surface training t=22876, loss=0.02331687416881323\n", - 
"Surface training t=22877, loss=0.03161579184234142\n", - "Surface training t=22878, loss=0.031172492541372776\n", - "Surface training t=22879, loss=0.022224169224500656\n", - "Surface training t=22880, loss=0.03126116283237934\n", - "Surface training t=22881, loss=0.019265485927462578\n", - "Surface training t=22882, loss=0.024263651110231876\n", - "Surface training t=22883, loss=0.02340090274810791\n", - "Surface training t=22884, loss=0.02367774210870266\n", - "Surface training t=22885, loss=0.018324854783713818\n", - "Surface training t=22886, loss=0.023540649563074112\n", - "Surface training t=22887, loss=0.023077789694070816\n", - "Surface training t=22888, loss=0.02560554165393114\n", - "Surface training t=22889, loss=0.027957488782703876\n", - "Surface training t=22890, loss=0.03220486082136631\n", - "Surface training t=22891, loss=0.022574787959456444\n", - "Surface training t=22892, loss=0.029147337190806866\n", - "Surface training t=22893, loss=0.0266070868819952\n", - "Surface training t=22894, loss=0.028711343184113503\n", - "Surface training t=22895, loss=0.024732607416808605\n", - "Surface training t=22896, loss=0.02507853414863348\n", - "Surface training t=22897, loss=0.026106588542461395\n", - "Surface training t=22898, loss=0.034985775128006935\n", - "Surface training t=22899, loss=0.02583459671586752\n", - "Surface training t=22900, loss=0.021549432072788477\n", - "Surface training t=22901, loss=0.029733690433204174\n", - "Surface training t=22902, loss=0.024735026992857456\n", - "Surface training t=22903, loss=0.02534643653780222\n", - "Surface training t=22904, loss=0.028654325753450394\n", - "Surface training t=22905, loss=0.02548166550695896\n", - "Surface training t=22906, loss=0.02057568170130253\n", - "Surface training t=22907, loss=0.0226244879886508\n", - "Surface training t=22908, loss=0.019778229296207428\n", - "Surface training t=22909, loss=0.017599365673959255\n", - "Surface training t=22910, loss=0.019033554941415787\n", - "Surface training t=22911, loss=0.017523063346743584\n", - "Surface training t=22912, loss=0.021129626780748367\n", - "Surface training t=22913, loss=0.02207003440707922\n", - "Surface training t=22914, loss=0.016562895383685827\n", - "Surface training t=22915, loss=0.021362990140914917\n", - "Surface training t=22916, loss=0.021137493662536144\n", - "Surface training t=22917, loss=0.023651269264519215\n", - "Surface training t=22918, loss=0.030601290054619312\n", - "Surface training t=22919, loss=0.029950729571282864\n", - "Surface training t=22920, loss=0.023131873458623886\n", - "Surface training t=22921, loss=0.02825964242219925\n", - "Surface training t=22922, loss=0.02198271919041872\n", - "Surface training t=22923, loss=0.01997694279998541\n", - "Surface training t=22924, loss=0.019976377487182617\n", - "Surface training t=22925, loss=0.018159231171011925\n", - "Surface training t=22926, loss=0.018017632886767387\n", - "Surface training t=22927, loss=0.018514967523515224\n", - "Surface training t=22928, loss=0.020078214816749096\n", - "Surface training t=22929, loss=0.020123008638620377\n", - "Surface training t=22930, loss=0.021278321743011475\n", - "Surface training t=22931, loss=0.020723004825413227\n", - "Surface training t=22932, loss=0.020180830731987953\n", - "Surface training t=22933, loss=0.028885374777019024\n", - "Surface training t=22934, loss=0.026899158023297787\n", - "Surface training t=22935, loss=0.030066750943660736\n", - "Surface training t=22936, loss=0.025506037287414074\n", - "Surface training t=22937, 
loss=0.021455674432218075\n", - "Surface training t=22938, loss=0.021492344327270985\n", - "Surface training t=22939, loss=0.030193353071808815\n", - "Surface training t=22940, loss=0.03050162084400654\n", - "Surface training t=22941, loss=0.022769492119550705\n", - "Surface training t=22942, loss=0.020402222871780396\n", - "Surface training t=22943, loss=0.02350038383156061\n", - "Surface training t=22944, loss=0.021083710715174675\n", - "Surface training t=22945, loss=0.021947077475488186\n", - "Surface training t=22946, loss=0.01968113798648119\n", - "Surface training t=22947, loss=0.013290409930050373\n", - "Surface training t=22948, loss=0.013302762061357498\n", - "Surface training t=22949, loss=0.01684264186769724\n", - "Surface training t=22950, loss=0.015948932617902756\n", - "Surface training t=22951, loss=0.0171932615339756\n", - "Surface training t=22952, loss=0.019116740208119154\n", - "Surface training t=22953, loss=0.030812821350991726\n", - "Surface training t=22954, loss=0.028163578361272812\n", - "Surface training t=22955, loss=0.040820250287652016\n", - "Surface training t=22956, loss=0.02549532800912857\n", - "Surface training t=22957, loss=0.022922969423234463\n", - "Surface training t=22958, loss=0.0331702996045351\n", - "Surface training t=22959, loss=0.029447253793478012\n", - "Surface training t=22960, loss=0.04272705316543579\n", - "Surface training t=22961, loss=0.032994452863931656\n", - "Surface training t=22962, loss=0.02769674640148878\n", - "Surface training t=22963, loss=0.024573056027293205\n", - "Surface training t=22964, loss=0.024342410266399384\n", - "Surface training t=22965, loss=0.01858609914779663\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=22966, loss=0.020810185931622982\n", - "Surface training t=22967, loss=0.022420604713261127\n", - "Surface training t=22968, loss=0.019890956580638885\n", - "Surface training t=22969, loss=0.018306064419448376\n", - "Surface training t=22970, loss=0.01846898067742586\n", - "Surface training t=22971, loss=0.01793952565640211\n", - "Surface training t=22972, loss=0.01898689940571785\n", - "Surface training t=22973, loss=0.018811007030308247\n", - "Surface training t=22974, loss=0.018990831449627876\n", - "Surface training t=22975, loss=0.022754240781068802\n", - "Surface training t=22976, loss=0.03060944564640522\n", - "Surface training t=22977, loss=0.021636389661580324\n", - "Surface training t=22978, loss=0.02091937232762575\n", - "Surface training t=22979, loss=0.019454226829111576\n", - "Surface training t=22980, loss=0.020196945406496525\n", - "Surface training t=22981, loss=0.021786266937851906\n", - "Surface training t=22982, loss=0.01889200508594513\n", - "Surface training t=22983, loss=0.019903107546269894\n", - "Surface training t=22984, loss=0.023331118747591972\n", - "Surface training t=22985, loss=0.03090725839138031\n", - "Surface training t=22986, loss=0.03182671777904034\n", - "Surface training t=22987, loss=0.03095889464020729\n", - "Surface training t=22988, loss=0.027602847665548325\n", - "Surface training t=22989, loss=0.03089391253888607\n", - "Surface training t=22990, loss=0.02430675644427538\n", - "Surface training t=22991, loss=0.021167946979403496\n", - "Surface training t=22992, loss=0.018998978659510612\n", - "Surface training t=22993, loss=0.02284327521920204\n", - "Surface training t=22994, loss=0.029309479519724846\n", - "Surface training t=22995, loss=0.03283564932644367\n", - "Surface training t=22996, 
loss=0.029913638718426228\n", - "Surface training t=22997, loss=0.029608486220240593\n", - "Surface training t=22998, loss=0.031161745078861713\n", - "Surface training t=22999, loss=0.03442222531884909\n", - "Surface training t=23000, loss=0.03416651766747236\n", - "Surface training t=23001, loss=0.04429230839014053\n", - "Surface training t=23002, loss=0.03445982374250889\n", - "Surface training t=23003, loss=0.03534477297216654\n", - "Surface training t=23004, loss=0.0358018446713686\n", - "Surface training t=23005, loss=0.046246374025940895\n", - "Surface training t=23006, loss=0.03878795728087425\n", - "Surface training t=23007, loss=0.02962454780936241\n", - "Surface training t=23008, loss=0.030593259260058403\n", - "Surface training t=23009, loss=0.03867574408650398\n", - "Surface training t=23010, loss=0.027232258580625057\n", - "Surface training t=23011, loss=0.029122788459062576\n", - "Surface training t=23012, loss=0.025460204109549522\n", - "Surface training t=23013, loss=0.021777117624878883\n", - "Surface training t=23014, loss=0.020769139286130667\n", - "Surface training t=23015, loss=0.022428099066019058\n", - "Surface training t=23016, loss=0.025872960686683655\n", - "Surface training t=23017, loss=0.018817814998328686\n", - "Surface training t=23018, loss=0.024417441338300705\n", - "Surface training t=23019, loss=0.027030213735997677\n", - "Surface training t=23020, loss=0.0291155893355608\n", - "Surface training t=23021, loss=0.020487336441874504\n", - "Surface training t=23022, loss=0.021921824663877487\n", - "Surface training t=23023, loss=0.029026424512267113\n", - "Surface training t=23024, loss=0.019373321905732155\n", - "Surface training t=23025, loss=0.01702388096600771\n", - "Surface training t=23026, loss=0.02210531197488308\n", - "Surface training t=23027, loss=0.019246570765972137\n", - "Surface training t=23028, loss=0.017511649057269096\n", - "Surface training t=23029, loss=0.020889670588076115\n", - "Surface training t=23030, loss=0.014135616831481457\n", - "Surface training t=23031, loss=0.014771365094929934\n", - "Surface training t=23032, loss=0.022952549159526825\n", - "Surface training t=23033, loss=0.01945347525179386\n", - "Surface training t=23034, loss=0.017876995261758566\n", - "Surface training t=23035, loss=0.025575620122253895\n", - "Surface training t=23036, loss=0.023050344549119473\n", - "Surface training t=23037, loss=0.024714931845664978\n", - "Surface training t=23038, loss=0.025169490836560726\n", - "Surface training t=23039, loss=0.022945307195186615\n", - "Surface training t=23040, loss=0.027304179035127163\n", - "Surface training t=23041, loss=0.026845471933484077\n", - "Surface training t=23042, loss=0.025545697659254074\n", - "Surface training t=23043, loss=0.021501407027244568\n", - "Surface training t=23044, loss=0.020649454556405544\n", - "Surface training t=23045, loss=0.016040187794715166\n", - "Surface training t=23046, loss=0.028874034993350506\n", - "Surface training t=23047, loss=0.022121483460068703\n", - "Surface training t=23048, loss=0.018858304247260094\n", - "Surface training t=23049, loss=0.022460016421973705\n", - "Surface training t=23050, loss=0.024592269212007523\n", - "Surface training t=23051, loss=0.022588316351175308\n", - "Surface training t=23052, loss=0.023827219381928444\n", - "Surface training t=23053, loss=0.021394765004515648\n", - "Surface training t=23054, loss=0.016148113645613194\n", - "Surface training t=23055, loss=0.02175137121230364\n", - "Surface training t=23056, loss=0.020367255434393883\n", 
- "Surface training t=23057, loss=0.024531444534659386\n", - "Surface training t=23058, loss=0.02436983399093151\n", - "Surface training t=23059, loss=0.02445884421467781\n", - "Surface training t=23060, loss=0.0336702112108469\n", - "Surface training t=23061, loss=0.025012295693159103\n", - "Surface training t=23062, loss=0.019076145254075527\n", - "Surface training t=23063, loss=0.028849076479673386\n", - "Surface training t=23064, loss=0.024804122745990753\n", - "Surface training t=23065, loss=0.024703534319996834\n", - "Surface training t=23066, loss=0.02503760252147913\n", - "Surface training t=23067, loss=0.018088387325406075\n", - "Surface training t=23068, loss=0.02180431131273508\n", - "Surface training t=23069, loss=0.024300680495798588\n", - "Surface training t=23070, loss=0.024827024899423122\n", - "Surface training t=23071, loss=0.024166317656636238\n", - "Surface training t=23072, loss=0.02660258486866951\n", - "Surface training t=23073, loss=0.024463790468871593\n", - "Surface training t=23074, loss=0.02606711257249117\n", - "Surface training t=23075, loss=0.023501058109104633\n", - "Surface training t=23076, loss=0.01780781801789999\n", - "Surface training t=23077, loss=0.021754959598183632\n", - "Surface training t=23078, loss=0.02355797588825226\n", - "Surface training t=23079, loss=0.020151659846305847\n", - "Surface training t=23080, loss=0.01532899122685194\n", - "Surface training t=23081, loss=0.017118548043072224\n", - "Surface training t=23082, loss=0.02032193634659052\n", - "Surface training t=23083, loss=0.018634134903550148\n", - "Surface training t=23084, loss=0.017181046307086945\n", - "Surface training t=23085, loss=0.016961200162768364\n", - "Surface training t=23086, loss=0.021523932926356792\n", - "Surface training t=23087, loss=0.023894071578979492\n", - "Surface training t=23088, loss=0.02570805698633194\n", - "Surface training t=23089, loss=0.019761540461331606\n", - "Surface training t=23090, loss=0.01798206754028797\n", - "Surface training t=23091, loss=0.02273884415626526\n", - "Surface training t=23092, loss=0.023958592675626278\n", - "Surface training t=23093, loss=0.0235699275508523\n", - "Surface training t=23094, loss=0.0199443306773901\n", - "Surface training t=23095, loss=0.01945909857749939\n", - "Surface training t=23096, loss=0.02458925172686577\n", - "Surface training t=23097, loss=0.014673774130642414\n", - "Surface training t=23098, loss=0.02077208925038576\n", - "Surface training t=23099, loss=0.01574787963181734\n", - "Surface training t=23100, loss=0.021980788558721542\n", - "Surface training t=23101, loss=0.016332700848579407\n", - "Surface training t=23102, loss=0.025572625920176506\n", - "Surface training t=23103, loss=0.024305099621415138\n", - "Surface training t=23104, loss=0.019270870834589005\n", - "Surface training t=23105, loss=0.02016657032072544\n", - "Surface training t=23106, loss=0.02179652266204357\n", - "Surface training t=23107, loss=0.016084984876215458\n", - "Surface training t=23108, loss=0.021268324926495552\n", - "Surface training t=23109, loss=0.018560798838734627\n", - "Surface training t=23110, loss=0.022403755225241184\n", - "Surface training t=23111, loss=0.028986627236008644\n", - "Surface training t=23112, loss=0.03325813449919224\n", - "Surface training t=23113, loss=0.024867895059287548\n", - "Surface training t=23114, loss=0.028234686702489853\n", - "Surface training t=23115, loss=0.025861941277980804\n", - "Surface training t=23116, loss=0.0288078086450696\n", - "Surface training t=23117, 
loss=0.03174450248479843\n", - "Surface training t=23118, loss=0.023942535743117332\n", - "Surface training t=23119, loss=0.025973854586482048\n", - "Surface training t=23120, loss=0.03229693230241537\n", - "Surface training t=23121, loss=0.027935718186199665\n", - "Surface training t=23122, loss=0.033890943974256516\n", - "Surface training t=23123, loss=0.03250407055020332\n", - "Surface training t=23124, loss=0.02791687101125717\n", - "Surface training t=23125, loss=0.035477256402373314\n", - "Surface training t=23126, loss=0.025623268447816372\n", - "Surface training t=23127, loss=0.02294040098786354\n", - "Surface training t=23128, loss=0.029345670714974403\n", - "Surface training t=23129, loss=0.028728390112519264\n", - "Surface training t=23130, loss=0.029323258437216282\n", - "Surface training t=23131, loss=0.03129299730062485\n", - "Surface training t=23132, loss=0.03174035158008337\n", - "Surface training t=23133, loss=0.02837025374174118\n", - "Surface training t=23134, loss=0.03756197541952133\n", - "Surface training t=23135, loss=0.024093437008559704\n", - "Surface training t=23136, loss=0.03284675069153309\n", - "Surface training t=23137, loss=0.029778405092656612\n", - "Surface training t=23138, loss=0.023786373902112246\n", - "Surface training t=23139, loss=0.02285951469093561\n", - "Surface training t=23140, loss=0.024315943010151386\n", - "Surface training t=23141, loss=0.032352020032703876\n", - "Surface training t=23142, loss=0.022215518169105053\n", - "Surface training t=23143, loss=0.017960055265575647\n", - "Surface training t=23144, loss=0.03224565647542477\n", - "Surface training t=23145, loss=0.024140363559126854\n", - "Surface training t=23146, loss=0.024163994938135147\n", - "Surface training t=23147, loss=0.03046676516532898\n", - "Surface training t=23148, loss=0.02411932684481144\n", - "Surface training t=23149, loss=0.026800379157066345\n", - "Surface training t=23150, loss=0.028060972690582275\n", - "Surface training t=23151, loss=0.03866007551550865\n", - "Surface training t=23152, loss=0.03196517750620842\n", - "Surface training t=23153, loss=0.02601313591003418\n", - "Surface training t=23154, loss=0.021716615185141563\n", - "Surface training t=23155, loss=0.02629654761403799\n", - "Surface training t=23156, loss=0.022400901652872562\n", - "Surface training t=23157, loss=0.028313877061009407\n", - "Surface training t=23158, loss=0.022582807578146458\n", - "Surface training t=23159, loss=0.019339729100465775\n", - "Surface training t=23160, loss=0.018801217898726463\n", - "Surface training t=23161, loss=0.01949762413278222\n", - "Surface training t=23162, loss=0.021856694482266903\n", - "Surface training t=23163, loss=0.021923329681158066\n", - "Surface training t=23164, loss=0.017119644209742546\n", - "Surface training t=23165, loss=0.024753740057349205\n", - "Surface training t=23166, loss=0.02114837523549795\n", - "Surface training t=23167, loss=0.019217681139707565\n", - "Surface training t=23168, loss=0.021602883003652096\n", - "Surface training t=23169, loss=0.027323422022163868\n", - "Surface training t=23170, loss=0.01986233051866293\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=23171, loss=0.02185877040028572\n", - "Surface training t=23172, loss=0.021522635594010353\n", - "Surface training t=23173, loss=0.028858658857643604\n", - "Surface training t=23174, loss=0.032233826816082\n", - "Surface training t=23175, loss=0.03331300523132086\n", - "Surface training t=23176, 
loss=0.025094857439398766\n", - "Surface training t=23177, loss=0.021409827284514904\n", - "Surface training t=23178, loss=0.021513493731617928\n", - "Surface training t=23179, loss=0.019655576907098293\n", - "Surface training t=23180, loss=0.026804630644619465\n", - "Surface training t=23181, loss=0.02342487219721079\n", - "Surface training t=23182, loss=0.023406133987009525\n", - "Surface training t=23183, loss=0.021254414692521095\n", - "Surface training t=23184, loss=0.019422142300754786\n", - "Surface training t=23185, loss=0.02748110517859459\n", - "Surface training t=23186, loss=0.031565114855766296\n", - "Surface training t=23187, loss=0.028903922997415066\n", - "Surface training t=23188, loss=0.03821338526904583\n", - "Surface training t=23189, loss=0.024490720592439175\n", - "Surface training t=23190, loss=0.022167320363223553\n", - "Surface training t=23191, loss=0.02405381202697754\n", - "Surface training t=23192, loss=0.020726943388581276\n", - "Surface training t=23193, loss=0.019795347936451435\n", - "Surface training t=23194, loss=0.022146673873066902\n", - "Surface training t=23195, loss=0.023170925676822662\n", - "Surface training t=23196, loss=0.02046298887580633\n", - "Surface training t=23197, loss=0.017628994770348072\n", - "Surface training t=23198, loss=0.020685351453721523\n", - "Surface training t=23199, loss=0.02034070063382387\n", - "Surface training t=23200, loss=0.02372357528656721\n", - "Surface training t=23201, loss=0.019329371862113476\n", - "Surface training t=23202, loss=0.017133550718426704\n", - "Surface training t=23203, loss=0.020402790047228336\n", - "Surface training t=23204, loss=0.01963403820991516\n", - "Surface training t=23205, loss=0.019663344137370586\n", - "Surface training t=23206, loss=0.026078549213707447\n", - "Surface training t=23207, loss=0.027215056121349335\n", - "Surface training t=23208, loss=0.023921675980091095\n", - "Surface training t=23209, loss=0.027192377485334873\n", - "Surface training t=23210, loss=0.03195499628782272\n", - "Surface training t=23211, loss=0.03156762570142746\n", - "Surface training t=23212, loss=0.026244167238473892\n", - "Surface training t=23213, loss=0.028728967532515526\n", - "Surface training t=23214, loss=0.020810415968298912\n", - "Surface training t=23215, loss=0.024321299977600574\n", - "Surface training t=23216, loss=0.025641942396759987\n", - "Surface training t=23217, loss=0.025738859549164772\n", - "Surface training t=23218, loss=0.02493704203516245\n", - "Surface training t=23219, loss=0.02668660320341587\n", - "Surface training t=23220, loss=0.02884845808148384\n", - "Surface training t=23221, loss=0.026682274416089058\n", - "Surface training t=23222, loss=0.026906081475317478\n", - "Surface training t=23223, loss=0.020735032856464386\n", - "Surface training t=23224, loss=0.020976562052965164\n", - "Surface training t=23225, loss=0.02066629659384489\n", - "Surface training t=23226, loss=0.03374416381120682\n", - "Surface training t=23227, loss=0.02809277828782797\n", - "Surface training t=23228, loss=0.019937907345592976\n", - "Surface training t=23229, loss=0.025002005510032177\n", - "Surface training t=23230, loss=0.030436117202043533\n", - "Surface training t=23231, loss=0.016304221004247665\n", - "Surface training t=23232, loss=0.02507366891950369\n", - "Surface training t=23233, loss=0.021355903707444668\n", - "Surface training t=23234, loss=0.02122977189719677\n", - "Surface training t=23235, loss=0.016427663154900074\n", - "Surface training t=23236, loss=0.02553558349609375\n", - 
"Surface training t=23237, loss=0.03149818256497383\n", - "Surface training t=23238, loss=0.047218287363648415\n", - "Surface training t=23239, loss=0.03611228335648775\n", - "Surface training t=23240, loss=0.029520371928811073\n", - "Surface training t=23241, loss=0.022283130325376987\n", - "Surface training t=23242, loss=0.034960679709911346\n", - "Surface training t=23243, loss=0.03192464541643858\n", - "Surface training t=23244, loss=0.03519116621464491\n", - "Surface training t=23245, loss=0.026057783514261246\n", - "Surface training t=23246, loss=0.02780937682837248\n", - "Surface training t=23247, loss=0.028127672150731087\n", - "Surface training t=23248, loss=0.02404923364520073\n", - "Surface training t=23249, loss=0.016138048842549324\n", - "Surface training t=23250, loss=0.017908000387251377\n", - "Surface training t=23251, loss=0.01971401274204254\n", - "Surface training t=23252, loss=0.019239196553826332\n", - "Surface training t=23253, loss=0.019117744639515877\n", - "Surface training t=23254, loss=0.02151612378656864\n", - "Surface training t=23255, loss=0.021718185395002365\n", - "Surface training t=23256, loss=0.020445559173822403\n", - "Surface training t=23257, loss=0.025868529453873634\n", - "Surface training t=23258, loss=0.02857372723519802\n", - "Surface training t=23259, loss=0.024006351828575134\n", - "Surface training t=23260, loss=0.02005138900130987\n", - "Surface training t=23261, loss=0.019521146081387997\n", - "Surface training t=23262, loss=0.01902696955949068\n", - "Surface training t=23263, loss=0.022015871480107307\n", - "Surface training t=23264, loss=0.018113447353243828\n", - "Surface training t=23265, loss=0.01996792759746313\n", - "Surface training t=23266, loss=0.020722071640193462\n", - "Surface training t=23267, loss=0.0213732635602355\n", - "Surface training t=23268, loss=0.019363606348633766\n", - "Surface training t=23269, loss=0.02232264168560505\n", - "Surface training t=23270, loss=0.030458640307188034\n", - "Surface training t=23271, loss=0.023526144213974476\n", - "Surface training t=23272, loss=0.028998871333897114\n", - "Surface training t=23273, loss=0.033200254663825035\n", - "Surface training t=23274, loss=0.03766159899532795\n", - "Surface training t=23275, loss=0.03255739249289036\n", - "Surface training t=23276, loss=0.026439914479851723\n", - "Surface training t=23277, loss=0.01852928288280964\n", - "Surface training t=23278, loss=0.020562225952744484\n", - "Surface training t=23279, loss=0.02299086283892393\n", - "Surface training t=23280, loss=0.023730999790132046\n", - "Surface training t=23281, loss=0.03653527796268463\n", - "Surface training t=23282, loss=0.041782746091485023\n", - "Surface training t=23283, loss=0.034245869144797325\n", - "Surface training t=23284, loss=0.038992918096482754\n", - "Surface training t=23285, loss=0.03292234614491463\n", - "Surface training t=23286, loss=0.023947957903146744\n", - "Surface training t=23287, loss=0.02341119386255741\n", - "Surface training t=23288, loss=0.029620193876326084\n", - "Surface training t=23289, loss=0.025847258046269417\n", - "Surface training t=23290, loss=0.03095034696161747\n", - "Surface training t=23291, loss=0.0342219565063715\n", - "Surface training t=23292, loss=0.022817956283688545\n", - "Surface training t=23293, loss=0.021624435670673847\n", - "Surface training t=23294, loss=0.023284479044377804\n", - "Surface training t=23295, loss=0.02621051575988531\n", - "Surface training t=23296, loss=0.02029884047806263\n", - "Surface training t=23297, 
loss=0.023166473023593426\n", - "Surface training t=23298, loss=0.02403123490512371\n", - "Surface training t=23299, loss=0.03284483030438423\n", - "Surface training t=23300, loss=0.02547683473676443\n", - "Surface training t=23301, loss=0.032528446055948734\n", - "Surface training t=23302, loss=0.03183265868574381\n", - "Surface training t=23303, loss=0.021363118663430214\n", - "Surface training t=23304, loss=0.02235238766297698\n", - "Surface training t=23305, loss=0.03455622307956219\n", - "Surface training t=23306, loss=0.022208855487406254\n", - "Surface training t=23307, loss=0.031906397081911564\n", - "Surface training t=23308, loss=0.02175991889089346\n", - "Surface training t=23309, loss=0.031085120514035225\n", - "Surface training t=23310, loss=0.023278072476387024\n", - "Surface training t=23311, loss=0.021214826963841915\n", - "Surface training t=23312, loss=0.017451763153076172\n", - "Surface training t=23313, loss=0.020403483882546425\n", - "Surface training t=23314, loss=0.01851736195385456\n", - "Surface training t=23315, loss=0.01880366075783968\n", - "Surface training t=23316, loss=0.0175437293946743\n", - "Surface training t=23317, loss=0.01897067204117775\n", - "Surface training t=23318, loss=0.02569814957678318\n", - "Surface training t=23319, loss=0.025662586092948914\n", - "Surface training t=23320, loss=0.029756424017250538\n", - "Surface training t=23321, loss=0.022728556767106056\n", - "Surface training t=23322, loss=0.023910864256322384\n", - "Surface training t=23323, loss=0.025657296180725098\n", - "Surface training t=23324, loss=0.023686238564550877\n", - "Surface training t=23325, loss=0.022097712382674217\n", - "Surface training t=23326, loss=0.020125240087509155\n", - "Surface training t=23327, loss=0.017356744036078453\n", - "Surface training t=23328, loss=0.017147612757980824\n", - "Surface training t=23329, loss=0.0226743184030056\n", - "Surface training t=23330, loss=0.02995341829955578\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=23331, loss=0.02479317458346486\n", - "Surface training t=23332, loss=0.020763498730957508\n", - "Surface training t=23333, loss=0.018533642403781414\n", - "Surface training t=23334, loss=0.02522372081875801\n", - "Surface training t=23335, loss=0.026024318765848875\n", - "Surface training t=23336, loss=0.02504814974963665\n", - "Surface training t=23337, loss=0.024746809154748917\n", - "Surface training t=23338, loss=0.022996045649051666\n", - "Surface training t=23339, loss=0.026020117104053497\n", - "Surface training t=23340, loss=0.02577590849250555\n", - "Surface training t=23341, loss=0.022854062728583813\n", - "Surface training t=23342, loss=0.02756225038319826\n", - "Surface training t=23343, loss=0.02344699390232563\n", - "Surface training t=23344, loss=0.021900678053498268\n", - "Surface training t=23345, loss=0.023320600390434265\n", - "Surface training t=23346, loss=0.029027347452938557\n", - "Surface training t=23347, loss=0.025100531987845898\n", - "Surface training t=23348, loss=0.032823086716234684\n", - "Surface training t=23349, loss=0.02152380719780922\n", - "Surface training t=23350, loss=0.02335314592346549\n", - "Surface training t=23351, loss=0.023827578872442245\n", - "Surface training t=23352, loss=0.024813303723931313\n", - "Surface training t=23353, loss=0.024686583317816257\n", - "Surface training t=23354, loss=0.03551473468542099\n", - "Surface training t=23355, loss=0.027182008139789104\n", - "Surface training t=23356, loss=0.0293714739382267\n", 
- "Surface training t=23357, loss=0.04105146788060665\n", - "Surface training t=23358, loss=0.029171557165682316\n", - "Surface training t=23359, loss=0.035785309970378876\n", - "Surface training t=23360, loss=0.028289180248975754\n", - "Surface training t=23361, loss=0.03011910431087017\n", - "Surface training t=23362, loss=0.04393971338868141\n", - "Surface training t=23363, loss=0.032762862741947174\n", - "Surface training t=23364, loss=0.02897025365382433\n", - "Surface training t=23365, loss=0.03477114625275135\n", - "Surface training t=23366, loss=0.03819335624575615\n", - "Surface training t=23367, loss=0.0261400006711483\n", - "Surface training t=23368, loss=0.023455089889466763\n", - "Surface training t=23369, loss=0.023288868367671967\n", - "Surface training t=23370, loss=0.0187325244769454\n", - "Surface training t=23371, loss=0.031076960265636444\n", - "Surface training t=23372, loss=0.029859078116714954\n", - "Surface training t=23373, loss=0.021516910754144192\n", - "Surface training t=23374, loss=0.023590470664203167\n", - "Surface training t=23375, loss=0.021698784083127975\n", - "Surface training t=23376, loss=0.023140071891248226\n", - "Surface training t=23377, loss=0.020933245308697224\n", - "Surface training t=23378, loss=0.02636039350181818\n", - "Surface training t=23379, loss=0.02865874581038952\n", - "Surface training t=23380, loss=0.025857748463749886\n", - "Surface training t=23381, loss=0.028206873685121536\n", - "Surface training t=23382, loss=0.021400834433734417\n", - "Surface training t=23383, loss=0.019449755549430847\n", - "Surface training t=23384, loss=0.019155016168951988\n", - "Surface training t=23385, loss=0.018515233881771564\n", - "Surface training t=23386, loss=0.01903616636991501\n", - "Surface training t=23387, loss=0.02147543989121914\n", - "Surface training t=23388, loss=0.021960311569273472\n", - "Surface training t=23389, loss=0.022580501157790422\n", - "Surface training t=23390, loss=0.019670026376843452\n", - "Surface training t=23391, loss=0.01984657160937786\n", - "Surface training t=23392, loss=0.023882124572992325\n", - "Surface training t=23393, loss=0.020965960808098316\n", - "Surface training t=23394, loss=0.025755001232028008\n", - "Surface training t=23395, loss=0.02413350623100996\n", - "Surface training t=23396, loss=0.024074556306004524\n", - "Surface training t=23397, loss=0.029002427123486996\n", - "Surface training t=23398, loss=0.02664385922253132\n", - "Surface training t=23399, loss=0.020026396960020065\n", - "Surface training t=23400, loss=0.03527821972966194\n", - "Surface training t=23401, loss=0.01987580955028534\n", - "Surface training t=23402, loss=0.016172828618437052\n", - "Surface training t=23403, loss=0.01929635740816593\n", - "Surface training t=23404, loss=0.021121722646057606\n", - "Surface training t=23405, loss=0.01755037158727646\n", - "Surface training t=23406, loss=0.0173154566437006\n", - "Surface training t=23407, loss=0.015438181348145008\n", - "Surface training t=23408, loss=0.017605011351406574\n", - "Surface training t=23409, loss=0.015702959150075912\n", - "Surface training t=23410, loss=0.017230127938091755\n", - "Surface training t=23411, loss=0.02275510597974062\n", - "Surface training t=23412, loss=0.024145412258803844\n", - "Surface training t=23413, loss=0.028642456978559494\n", - "Surface training t=23414, loss=0.028867660090327263\n", - "Surface training t=23415, loss=0.01947722490876913\n", - "Surface training t=23416, loss=0.02451299224048853\n", - "Surface training t=23417, 
loss=0.026288246735930443\n", - "Surface training t=23418, loss=0.024383515119552612\n", - "Surface training t=23419, loss=0.020530881360173225\n", - "Surface training t=23420, loss=0.02419718075543642\n", - "Surface training t=23421, loss=0.02957678586244583\n", - "Surface training t=23422, loss=0.024354323744773865\n", - "Surface training t=23423, loss=0.019626307301223278\n", - "Surface training t=23424, loss=0.015446734614670277\n", - "Surface training t=23425, loss=0.02220419328659773\n", - "Surface training t=23426, loss=0.016137059777975082\n", - "Surface training t=23427, loss=0.02137209288775921\n", - "Surface training t=23428, loss=0.02124390471726656\n", - "Surface training t=23429, loss=0.019431285560131073\n", - "Surface training t=23430, loss=0.019675762858241796\n", - "Surface training t=23431, loss=0.021125500090420246\n", - "Surface training t=23432, loss=0.018250396475195885\n", - "Surface training t=23433, loss=0.022214218974113464\n", - "Surface training t=23434, loss=0.0204799585044384\n", - "Surface training t=23435, loss=0.018755493685603142\n", - "Surface training t=23436, loss=0.017809227108955383\n", - "Surface training t=23437, loss=0.022833186201751232\n", - "Surface training t=23438, loss=0.028749486431479454\n", - "Surface training t=23439, loss=0.025791170075535774\n", - "Surface training t=23440, loss=0.023750796914100647\n", - "Surface training t=23441, loss=0.027842633426189423\n", - "Surface training t=23442, loss=0.023059850558638573\n", - "Surface training t=23443, loss=0.022618172224611044\n", - "Surface training t=23444, loss=0.021105737425386906\n", - "Surface training t=23445, loss=0.033070825040340424\n", - "Surface training t=23446, loss=0.025894599966704845\n", - "Surface training t=23447, loss=0.03663904778659344\n", - "Surface training t=23448, loss=0.03377251606434584\n", - "Surface training t=23449, loss=0.026327311992645264\n", - "Surface training t=23450, loss=0.01743443263694644\n", - "Surface training t=23451, loss=0.01775344740599394\n", - "Surface training t=23452, loss=0.015080845914781094\n", - "Surface training t=23453, loss=0.01989607699215412\n", - "Surface training t=23454, loss=0.017630111426115036\n", - "Surface training t=23455, loss=0.017355136573314667\n", - "Surface training t=23456, loss=0.021196061745285988\n", - "Surface training t=23457, loss=0.019416775554418564\n", - "Surface training t=23458, loss=0.015286494046449661\n", - "Surface training t=23459, loss=0.02105474192649126\n", - "Surface training t=23460, loss=0.029111651703715324\n", - "Surface training t=23461, loss=0.03153366129845381\n", - "Surface training t=23462, loss=0.03847695700824261\n", - "Surface training t=23463, loss=0.02449604868888855\n", - "Surface training t=23464, loss=0.027555035427212715\n", - "Surface training t=23465, loss=0.03190810792148113\n", - "Surface training t=23466, loss=0.03538217302411795\n", - "Surface training t=23467, loss=0.03359710983932018\n", - "Surface training t=23468, loss=0.033207548782229424\n", - "Surface training t=23469, loss=0.03607994690537453\n", - "Surface training t=23470, loss=0.04520828276872635\n", - "Surface training t=23471, loss=0.04152160696685314\n", - "Surface training t=23472, loss=0.03853919915854931\n", - "Surface training t=23473, loss=0.038185840472579\n", - "Surface training t=23474, loss=0.029365376569330692\n", - "Surface training t=23475, loss=0.046509988605976105\n", - "Surface training t=23476, loss=0.04015609622001648\n", - "Surface training t=23477, loss=0.033378930762410164\n", - "Surface 
training t=23478, loss=0.03506769984960556\n", - "Surface training t=23479, loss=0.04457950219511986\n", - "Surface training t=23480, loss=0.033468637615442276\n", - "Surface training t=23481, loss=0.026817046105861664\n", - "Surface training t=23482, loss=0.030815185979008675\n", - "Surface training t=23483, loss=0.027232706546783447\n", - "Surface training t=23484, loss=0.022439611610025167\n", - "Surface training t=23485, loss=0.032078973948955536\n", - "Surface training t=23486, loss=0.02736726962029934\n", - "Surface training t=23487, loss=0.03784852661192417\n", - "Surface training t=23488, loss=0.032084157690405846\n", - "Surface training t=23489, loss=0.03855193965137005\n", - "Surface training t=23490, loss=0.042518250644207\n", - "Surface training t=23491, loss=0.03828423377126455\n", - "Surface training t=23492, loss=0.033475483767688274\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=23493, loss=0.055427027866244316\n", - "Surface training t=23494, loss=0.040791542269289494\n", - "Surface training t=23495, loss=0.05062909983098507\n", - "Surface training t=23496, loss=0.045418862253427505\n", - "Surface training t=23497, loss=0.05255349725484848\n", - "Surface training t=23498, loss=0.03914847411215305\n", - "Surface training t=23499, loss=0.03140517696738243\n", - "Surface training t=23500, loss=0.021623247303068638\n", - "Surface training t=23501, loss=0.01991522591561079\n", - "Surface training t=23502, loss=0.02328023500740528\n", - "Surface training t=23503, loss=0.03149035945534706\n", - "Surface training t=23504, loss=0.029404347762465477\n", - "Surface training t=23505, loss=0.028267022222280502\n", - "Surface training t=23506, loss=0.03135021589696407\n", - "Surface training t=23507, loss=0.03284649737179279\n", - "Surface training t=23508, loss=0.04399744234979153\n", - "Surface training t=23509, loss=0.03288703877478838\n", - "Surface training t=23510, loss=0.03653874807059765\n", - "Surface training t=23511, loss=0.025972740724682808\n", - "Surface training t=23512, loss=0.025734572671353817\n", - "Surface training t=23513, loss=0.024638373404741287\n", - "Surface training t=23514, loss=0.024480419233441353\n", - "Surface training t=23515, loss=0.026391902938485146\n", - "Surface training t=23516, loss=0.031713674776256084\n", - "Surface training t=23517, loss=0.02422836795449257\n", - "Surface training t=23518, loss=0.02188394032418728\n", - "Surface training t=23519, loss=0.02046989742666483\n", - "Surface training t=23520, loss=0.021330779418349266\n", - "Surface training t=23521, loss=0.029330575838685036\n", - "Surface training t=23522, loss=0.026010854169726372\n", - "Surface training t=23523, loss=0.023165633901953697\n", - "Surface training t=23524, loss=0.036219941452145576\n", - "Surface training t=23525, loss=0.02755069825798273\n", - "Surface training t=23526, loss=0.030943142250180244\n", - "Surface training t=23527, loss=0.023655912838876247\n", - "Surface training t=23528, loss=0.02022387459874153\n", - "Surface training t=23529, loss=0.025286145508289337\n", - "Surface training t=23530, loss=0.025502740405499935\n", - "Surface training t=23531, loss=0.022045082412660122\n", - "Surface training t=23532, loss=0.01991075649857521\n", - "Surface training t=23533, loss=0.035861698910593987\n", - "Surface training t=23534, loss=0.03286212496459484\n", - "Surface training t=23535, loss=0.023091751150786877\n", - "Surface training t=23536, loss=0.026143459603190422\n", - "Surface training t=23537, 
loss=0.025862740352749825\n", - "Surface training t=23538, loss=0.03026433289051056\n", - "Surface training t=23539, loss=0.028567306697368622\n", - "Surface training t=23540, loss=0.029192445799708366\n", - "Surface training t=23541, loss=0.0386394876986742\n", - "Surface training t=23542, loss=0.02901976928114891\n", - "Surface training t=23543, loss=0.02857169881463051\n", - "Surface training t=23544, loss=0.028774472884833813\n", - "Surface training t=23545, loss=0.028958739712834358\n", - "Surface training t=23546, loss=0.04240043833851814\n", - "Surface training t=23547, loss=0.03264937736093998\n", - "Surface training t=23548, loss=0.03287384007126093\n", - "Surface training t=23549, loss=0.03052646853029728\n", - "Surface training t=23550, loss=0.030511487275362015\n", - "Surface training t=23551, loss=0.034251997247338295\n", - "Surface training t=23552, loss=0.03274659439921379\n", - "Surface training t=23553, loss=0.029144037514925003\n", - "Surface training t=23554, loss=0.028291945345699787\n", - "Surface training t=23555, loss=0.03425130061805248\n", - "Surface training t=23556, loss=0.02928513754159212\n", - "Surface training t=23557, loss=0.03507532738149166\n", - "Surface training t=23558, loss=0.03421641979366541\n", - "Surface training t=23559, loss=0.03030022606253624\n", - "Surface training t=23560, loss=0.026405805721879005\n", - "Surface training t=23561, loss=0.02720945980399847\n", - "Surface training t=23562, loss=0.02014261484146118\n", - "Surface training t=23563, loss=0.025232858955860138\n", - "Surface training t=23564, loss=0.023999215103685856\n", - "Surface training t=23565, loss=0.025199413299560547\n", - "Surface training t=23566, loss=0.02556647453457117\n", - "Surface training t=23567, loss=0.02638288028538227\n", - "Surface training t=23568, loss=0.023078657686710358\n", - "Surface training t=23569, loss=0.021669195033609867\n", - "Surface training t=23570, loss=0.020044546574354172\n", - "Surface training t=23571, loss=0.024944485165178776\n", - "Surface training t=23572, loss=0.02300167828798294\n", - "Surface training t=23573, loss=0.024612476117908955\n", - "Surface training t=23574, loss=0.030654583126306534\n", - "Surface training t=23575, loss=0.024717316031455994\n", - "Surface training t=23576, loss=0.02362103946506977\n", - "Surface training t=23577, loss=0.026454851031303406\n", - "Surface training t=23578, loss=0.02345424611121416\n", - "Surface training t=23579, loss=0.032187518663704395\n", - "Surface training t=23580, loss=0.01960866805166006\n", - "Surface training t=23581, loss=0.028465164825320244\n", - "Surface training t=23582, loss=0.02870920766144991\n", - "Surface training t=23583, loss=0.023702009581029415\n", - "Surface training t=23584, loss=0.022108503617346287\n", - "Surface training t=23585, loss=0.027671503834426403\n", - "Surface training t=23586, loss=0.025305273942649364\n", - "Surface training t=23587, loss=0.016941430047154427\n", - "Surface training t=23588, loss=0.020243016071617603\n", - "Surface training t=23589, loss=0.02162999100983143\n", - "Surface training t=23590, loss=0.022707410156726837\n", - "Surface training t=23591, loss=0.017398326192051172\n", - "Surface training t=23592, loss=0.01932370476424694\n", - "Surface training t=23593, loss=0.022217020392417908\n", - "Surface training t=23594, loss=0.03656799532473087\n", - "Surface training t=23595, loss=0.02829224243760109\n", - "Surface training t=23596, loss=0.033405386842787266\n", - "Surface training t=23597, loss=0.0277737807482481\n", - "Surface 
training t=23598, loss=0.02633110899478197\n", - "Surface training t=23599, loss=0.024657935835421085\n", - "Surface training t=23600, loss=0.02250022254884243\n", - "Surface training t=23601, loss=0.01592212077230215\n", - "Surface training t=23602, loss=0.015824919566512108\n", - "Surface training t=23603, loss=0.016661236062645912\n", - "Surface training t=23604, loss=0.01698865182697773\n", - "Surface training t=23605, loss=0.018039457499980927\n", - "Surface training t=23606, loss=0.016623561270534992\n", - "Surface training t=23607, loss=0.018207954242825508\n", - "Surface training t=23608, loss=0.024925739504396915\n", - "Surface training t=23609, loss=0.019841407425701618\n", - "Surface training t=23610, loss=0.031661782413721085\n", - "Surface training t=23611, loss=0.03839608281850815\n", - "Surface training t=23612, loss=0.029561727307736874\n", - "Surface training t=23613, loss=0.030210287310183048\n", - "Surface training t=23614, loss=0.026549361646175385\n", - "Surface training t=23615, loss=0.02365761250257492\n", - "Surface training t=23616, loss=0.030646358616650105\n", - "Surface training t=23617, loss=0.031241513788700104\n", - "Surface training t=23618, loss=0.023033468052744865\n", - "Surface training t=23619, loss=0.02782402280718088\n", - "Surface training t=23620, loss=0.029729328118264675\n", - "Surface training t=23621, loss=0.026666775345802307\n", - "Surface training t=23622, loss=0.036035263910889626\n", - "Surface training t=23623, loss=0.036502547562122345\n", - "Surface training t=23624, loss=0.024246089160442352\n", - "Surface training t=23625, loss=0.026104074902832508\n", - "Surface training t=23626, loss=0.029702224768698215\n", - "Surface training t=23627, loss=0.029471098445355892\n", - "Surface training t=23628, loss=0.023226357996463776\n", - "Surface training t=23629, loss=0.028746962547302246\n", - "Surface training t=23630, loss=0.028830124996602535\n", - "Surface training t=23631, loss=0.026238284073770046\n", - "Surface training t=23632, loss=0.024033035151660442\n", - "Surface training t=23633, loss=0.019998296163976192\n", - "Surface training t=23634, loss=0.024234643206000328\n", - "Surface training t=23635, loss=0.026170819997787476\n", - "Surface training t=23636, loss=0.028080391697585583\n", - "Surface training t=23637, loss=0.0318838506937027\n", - "Surface training t=23638, loss=0.026121455244719982\n", - "Surface training t=23639, loss=0.028733573853969574\n", - "Surface training t=23640, loss=0.021265928633511066\n", - "Surface training t=23641, loss=0.02400832623243332\n", - "Surface training t=23642, loss=0.020181632600724697\n", - "Surface training t=23643, loss=0.029096882790327072\n", - "Surface training t=23644, loss=0.026912719942629337\n", - "Surface training t=23645, loss=0.03190780244767666\n", - "Surface training t=23646, loss=0.03169453330338001\n", - "Surface training t=23647, loss=0.03937350772321224\n", - "Surface training t=23648, loss=0.02559081930667162\n", - "Surface training t=23649, loss=0.024684127420186996\n", - "Surface training t=23650, loss=0.03473486751317978\n", - "Surface training t=23651, loss=0.025233098305761814\n", - "Surface training t=23652, loss=0.029799741692841053\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=23653, loss=0.027030743658542633\n", - "Surface training t=23654, loss=0.02778300642967224\n", - "Surface training t=23655, loss=0.029834473505616188\n", - "Surface training t=23656, loss=0.023635723628103733\n", - "Surface training t=23657, 
loss=0.01868475042283535\n", - "Surface training t=23658, loss=0.023073244839906693\n", - "Surface training t=23659, loss=0.02164463046938181\n", - "Surface training t=23660, loss=0.028778203763067722\n", - "Surface training t=23661, loss=0.01960688177496195\n", - "Surface training t=23662, loss=0.025060582906007767\n", - "Surface training t=23663, loss=0.020480399951338768\n", - "Surface training t=23664, loss=0.020359236747026443\n", - "Surface training t=23665, loss=0.01971256360411644\n", - "Surface training t=23666, loss=0.022409881465137005\n", - "Surface training t=23667, loss=0.01712147891521454\n", - "Surface training t=23668, loss=0.015432432293891907\n", - "Surface training t=23669, loss=0.013606593012809753\n", - "Surface training t=23670, loss=0.016927563585340977\n", - "Surface training t=23671, loss=0.017459805589169264\n", - "Surface training t=23672, loss=0.014459885191172361\n", - "Surface training t=23673, loss=0.018218674696981907\n", - "Surface training t=23674, loss=0.0180191183462739\n", - "Surface training t=23675, loss=0.012968584895133972\n", - "Surface training t=23676, loss=0.016039030626416206\n", - "Surface training t=23677, loss=0.019929238595068455\n", - "Surface training t=23678, loss=0.021597066894173622\n", - "Surface training t=23679, loss=0.019465740770101547\n", - "Surface training t=23680, loss=0.019279024563729763\n", - "Surface training t=23681, loss=0.024209288880228996\n", - "Surface training t=23682, loss=0.029330501332879066\n", - "Surface training t=23683, loss=0.02373940870165825\n", - "Surface training t=23684, loss=0.024647468701004982\n", - "Surface training t=23685, loss=0.018323122523725033\n", - "Surface training t=23686, loss=0.018991151824593544\n", - "Surface training t=23687, loss=0.0229779789224267\n", - "Surface training t=23688, loss=0.022253609262406826\n", - "Surface training t=23689, loss=0.023792964406311512\n", - "Surface training t=23690, loss=0.018229668028652668\n", - "Surface training t=23691, loss=0.01992736803367734\n", - "Surface training t=23692, loss=0.021170747466385365\n", - "Surface training t=23693, loss=0.01706693647429347\n", - "Surface training t=23694, loss=0.026317493990063667\n", - "Surface training t=23695, loss=0.017739323899149895\n", - "Surface training t=23696, loss=0.01859431527554989\n", - "Surface training t=23697, loss=0.01419068081304431\n", - "Surface training t=23698, loss=0.019044047221541405\n", - "Surface training t=23699, loss=0.01482229633256793\n", - "Surface training t=23700, loss=0.018794759176671505\n", - "Surface training t=23701, loss=0.024928899481892586\n", - "Surface training t=23702, loss=0.01828186586499214\n", - "Surface training t=23703, loss=0.02266431227326393\n", - "Surface training t=23704, loss=0.029608347453176975\n", - "Surface training t=23705, loss=0.022366030141711235\n", - "Surface training t=23706, loss=0.02390029840171337\n", - "Surface training t=23707, loss=0.022931539453566074\n", - "Surface training t=23708, loss=0.023076684214174747\n", - "Surface training t=23709, loss=0.023331278935074806\n", - "Surface training t=23710, loss=0.024920394644141197\n", - "Surface training t=23711, loss=0.02460301388055086\n", - "Surface training t=23712, loss=0.03145258128643036\n", - "Surface training t=23713, loss=0.025344830006361008\n", - "Surface training t=23714, loss=0.02440347708761692\n", - "Surface training t=23715, loss=0.03150648809969425\n", - "Surface training t=23716, loss=0.022665993310511112\n", - "Surface training t=23717, loss=0.03330790903419256\n", - 
"Surface training t=23718, loss=0.020898034796118736\n", - "Surface training t=23719, loss=0.022731679491698742\n", - "Surface training t=23720, loss=0.024714468978345394\n", - "Surface training t=23721, loss=0.02583344653248787\n", - "Surface training t=23722, loss=0.016155574470758438\n", - "Surface training t=23723, loss=0.02133171074092388\n", - "Surface training t=23724, loss=0.022568109445273876\n", - "Surface training t=23725, loss=0.019890311174094677\n", - "Surface training t=23726, loss=0.016628206707537174\n", - "Surface training t=23727, loss=0.021947670727968216\n", - "Surface training t=23728, loss=0.025766829028725624\n", - "Surface training t=23729, loss=0.02000632928684354\n", - "Surface training t=23730, loss=0.0209744181483984\n", - "Surface training t=23731, loss=0.01736885029822588\n", - "Surface training t=23732, loss=0.020092211198061705\n", - "Surface training t=23733, loss=0.014094098471105099\n", - "Surface training t=23734, loss=0.016642852686345577\n", - "Surface training t=23735, loss=0.016567429527640343\n", - "Surface training t=23736, loss=0.016323570162057877\n", - "Surface training t=23737, loss=0.02326312381774187\n", - "Surface training t=23738, loss=0.019765262492001057\n", - "Surface training t=23739, loss=0.018437574617564678\n", - "Surface training t=23740, loss=0.014643467031419277\n", - "Surface training t=23741, loss=0.0184996509924531\n", - "Surface training t=23742, loss=0.015448745340108871\n", - "Surface training t=23743, loss=0.023645748384296894\n", - "Surface training t=23744, loss=0.03363283537328243\n", - "Surface training t=23745, loss=0.03417823649942875\n", - "Surface training t=23746, loss=0.03387232590466738\n", - "Surface training t=23747, loss=0.049611328169703484\n", - "Surface training t=23748, loss=0.0434954259544611\n", - "Surface training t=23749, loss=0.036845484748482704\n", - "Surface training t=23750, loss=0.04493635147809982\n", - "Surface training t=23751, loss=0.04587579891085625\n", - "Surface training t=23752, loss=0.03609337005764246\n", - "Surface training t=23753, loss=0.034546250477433205\n", - "Surface training t=23754, loss=0.03108236286789179\n", - "Surface training t=23755, loss=0.0317680174484849\n", - "Surface training t=23756, loss=0.02911088988184929\n", - "Surface training t=23757, loss=0.028685709461569786\n", - "Surface training t=23758, loss=0.02890244498848915\n", - "Surface training t=23759, loss=0.02980031445622444\n", - "Surface training t=23760, loss=0.024243319407105446\n", - "Surface training t=23761, loss=0.028455485589802265\n", - "Surface training t=23762, loss=0.02887000236660242\n", - "Surface training t=23763, loss=0.02205041330307722\n", - "Surface training t=23764, loss=0.02516128681600094\n", - "Surface training t=23765, loss=0.026265683583915234\n", - "Surface training t=23766, loss=0.018920287489891052\n", - "Surface training t=23767, loss=0.020372452680021524\n", - "Surface training t=23768, loss=0.013936765491962433\n", - "Surface training t=23769, loss=0.01794961979612708\n", - "Surface training t=23770, loss=0.017645190469920635\n", - "Surface training t=23771, loss=0.01534958090633154\n", - "Surface training t=23772, loss=0.017493772320449352\n", - "Surface training t=23773, loss=0.026442689821124077\n", - "Surface training t=23774, loss=0.02838108502328396\n", - "Surface training t=23775, loss=0.0242025014013052\n", - "Surface training t=23776, loss=0.025612199679017067\n", - "Surface training t=23777, loss=0.022892635315656662\n", - "Surface training t=23778, 
loss=0.02406386472284794\n", - "Surface training t=23779, loss=0.01735875802114606\n", - "Surface training t=23780, loss=0.02277999371290207\n", - "Surface training t=23781, loss=0.021972027607262135\n", - "Surface training t=23782, loss=0.024874690920114517\n", - "Surface training t=23783, loss=0.030471653677523136\n", - "Surface training t=23784, loss=0.02644809614866972\n", - "Surface training t=23785, loss=0.036992816254496574\n", - "Surface training t=23786, loss=0.021032363176345825\n", - "Surface training t=23787, loss=0.027486290782690048\n", - "Surface training t=23788, loss=0.02699306048452854\n", - "Surface training t=23789, loss=0.02340146992355585\n", - "Surface training t=23790, loss=0.02405219804495573\n", - "Surface training t=23791, loss=0.026173196732997894\n", - "Surface training t=23792, loss=0.017133643850684166\n", - "Surface training t=23793, loss=0.01767836231738329\n", - "Surface training t=23794, loss=0.018133617006242275\n", - "Surface training t=23795, loss=0.020568926818668842\n", - "Surface training t=23796, loss=0.017935609444975853\n", - "Surface training t=23797, loss=0.021092993207275867\n", - "Surface training t=23798, loss=0.019641499035060406\n", - "Surface training t=23799, loss=0.017796404659748077\n", - "Surface training t=23800, loss=0.015584879089146852\n", - "Surface training t=23801, loss=0.016194675117731094\n", - "Surface training t=23802, loss=0.01710453350096941\n", - "Surface training t=23803, loss=0.014929444063454866\n", - "Surface training t=23804, loss=0.021348051726818085\n", - "Surface training t=23805, loss=0.020085246302187443\n", - "Surface training t=23806, loss=0.01882150024175644\n", - "Surface training t=23807, loss=0.022980067878961563\n", - "Surface training t=23808, loss=0.027568158693611622\n", - "Surface training t=23809, loss=0.023269275203347206\n", - "Surface training t=23810, loss=0.02314777672290802\n", - "Surface training t=23811, loss=0.021867183968424797\n", - "Surface training t=23812, loss=0.021370683796703815\n", - "Surface training t=23813, loss=0.020562785677611828\n", - "Surface training t=23814, loss=0.019930595066398382\n", - "Surface training t=23815, loss=0.0252804858610034\n", - "Surface training t=23816, loss=0.02291283942759037\n", - "Surface training t=23817, loss=0.017306958325207233\n", - "Surface training t=23818, loss=0.020965203642845154\n", - "Surface training t=23819, loss=0.01522746216505766\n", - "Surface training t=23820, loss=0.02038631495088339\n", - "Surface training t=23821, loss=0.02060072124004364\n", - "Surface training t=23822, loss=0.01920344028621912\n", - "Surface training t=23823, loss=0.018323122523725033\n", - "Surface training t=23824, loss=0.020426036790013313\n", - "Surface training t=23825, loss=0.021532798185944557\n", - "Surface training t=23826, loss=0.01764401886612177\n", - "Surface training t=23827, loss=0.020774158649146557\n", - "Surface training t=23828, loss=0.02358590066432953\n", - "Surface training t=23829, loss=0.030697423964738846\n", - "Surface training t=23830, loss=0.028116931207478046\n", - "Surface training t=23831, loss=0.026917876675724983\n", - "Surface training t=23832, loss=0.023135224357247353\n", - "Surface training t=23833, loss=0.023997925221920013\n", - "Surface training t=23834, loss=0.01655254326760769\n", - "Surface training t=23835, loss=0.017064256593585014\n", - "Surface training t=23836, loss=0.018607332836836576\n", - "Surface training t=23837, loss=0.016915615182369947\n", - "Surface training t=23838, loss=0.022829881869256496\n", - 
"Surface training t=23839, loss=0.01710368972271681\n", - "Surface training t=23840, loss=0.017548952251672745\n", - "Surface training t=23841, loss=0.020778296515345573\n", - "Surface training t=23842, loss=0.02972070872783661\n", - "Surface training t=23843, loss=0.025713663548231125\n", - "Surface training t=23844, loss=0.02046295627951622\n", - "Surface training t=23845, loss=0.029916911385953426\n", - "Surface training t=23846, loss=0.023654223419725895\n", - "Surface training t=23847, loss=0.02538934350013733\n", - "Surface training t=23848, loss=0.02252848632633686\n", - "Surface training t=23849, loss=0.029124373570084572\n", - "Surface training t=23850, loss=0.029676628299057484\n", - "Surface training t=23851, loss=0.024154641665518284\n", - "Surface training t=23852, loss=0.01732125971466303\n", - "Surface training t=23853, loss=0.02504779677838087\n", - "Surface training t=23854, loss=0.02076296415179968\n", - "Surface training t=23855, loss=0.022672809660434723\n", - "Surface training t=23856, loss=0.04016690142452717\n", - "Surface training t=23857, loss=0.02682401891797781\n", - "Surface training t=23858, loss=0.04072926007211208\n", - "Surface training t=23859, loss=0.03914570715278387\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=23860, loss=0.03508002124726772\n", - "Surface training t=23861, loss=0.039055110886693\n", - "Surface training t=23862, loss=0.030381455086171627\n", - "Surface training t=23863, loss=0.029810900799930096\n", - "Surface training t=23864, loss=0.038490286096930504\n", - "Surface training t=23865, loss=0.0250209653750062\n", - "Surface training t=23866, loss=0.048862285912036896\n", - "Surface training t=23867, loss=0.03527831006795168\n", - "Surface training t=23868, loss=0.05283898487687111\n", - "Surface training t=23869, loss=0.03712929226458073\n", - "Surface training t=23870, loss=0.03564193658530712\n", - "Surface training t=23871, loss=0.028633346781134605\n", - "Surface training t=23872, loss=0.045320065692067146\n", - "Surface training t=23873, loss=0.03699982538819313\n", - "Surface training t=23874, loss=0.04028995335102081\n", - "Surface training t=23875, loss=0.029908147640526295\n", - "Surface training t=23876, loss=0.026734775863587856\n", - "Surface training t=23877, loss=0.029188125394284725\n", - "Surface training t=23878, loss=0.024613388814032078\n", - "Surface training t=23879, loss=0.018701076041907072\n", - "Surface training t=23880, loss=0.02394176460802555\n", - "Surface training t=23881, loss=0.024522271938621998\n", - "Surface training t=23882, loss=0.01873468328267336\n", - "Surface training t=23883, loss=0.017898951657116413\n", - "Surface training t=23884, loss=0.022626031190156937\n", - "Surface training t=23885, loss=0.018449287861585617\n", - "Surface training t=23886, loss=0.02046248782426119\n", - "Surface training t=23887, loss=0.019035776611417532\n", - "Surface training t=23888, loss=0.026994734071195126\n", - "Surface training t=23889, loss=0.024386429227888584\n", - "Surface training t=23890, loss=0.021570656448602676\n", - "Surface training t=23891, loss=0.023164953105151653\n", - "Surface training t=23892, loss=0.02046556118875742\n", - "Surface training t=23893, loss=0.02124151773750782\n", - "Surface training t=23894, loss=0.018651064485311508\n", - "Surface training t=23895, loss=0.01994914561510086\n", - "Surface training t=23896, loss=0.027878825552761555\n", - "Surface training t=23897, loss=0.026888566091656685\n", - "Surface training t=23898, 
-    [removed notebook stdout: ~1,260 lines of "Surface training t=23899 … t=25160, loss=…" progress logs from a stream output cell; reported losses fluctuate between roughly 0.013 and 0.062]
loss=0.014805252198129892\n", - "Surface training t=25161, loss=0.013374218717217445\n", - "Surface training t=25162, loss=0.019733188673853874\n", - "Surface training t=25163, loss=0.0195614043623209\n", - "Surface training t=25164, loss=0.02099058125168085\n", - "Surface training t=25165, loss=0.013459712266921997\n", - "Surface training t=25166, loss=0.014133503660559654\n", - "Surface training t=25167, loss=0.017674545757472515\n", - "Surface training t=25168, loss=0.01801193505525589\n", - "Surface training t=25169, loss=0.02597154676914215\n", - "Surface training t=25170, loss=0.019472556188702583\n", - "Surface training t=25171, loss=0.02398193534463644\n", - "Surface training t=25172, loss=0.021104171872138977\n", - "Surface training t=25173, loss=0.027444412000477314\n", - "Surface training t=25174, loss=0.019687477499246597\n", - "Surface training t=25175, loss=0.02394095901399851\n", - "Surface training t=25176, loss=0.022330861538648605\n", - "Surface training t=25177, loss=0.01633703149855137\n", - "Surface training t=25178, loss=0.02211651299148798\n", - "Surface training t=25179, loss=0.014879585709422827\n", - "Surface training t=25180, loss=0.018353261053562164\n", - "Surface training t=25181, loss=0.02025480940937996\n", - "Surface training t=25182, loss=0.021841201931238174\n", - "Surface training t=25183, loss=0.031146707013249397\n", - "Surface training t=25184, loss=0.02867879718542099\n", - "Surface training t=25185, loss=0.027785029262304306\n", - "Surface training t=25186, loss=0.01868349127471447\n", - "Surface training t=25187, loss=0.01786344638094306\n", - "Surface training t=25188, loss=0.0230961125344038\n", - "Surface training t=25189, loss=0.02373537514358759\n", - "Surface training t=25190, loss=0.02413630299270153\n", - "Surface training t=25191, loss=0.014962906017899513\n", - "Surface training t=25192, loss=0.02021876722574234\n", - "Surface training t=25193, loss=0.015369321219623089\n", - "Surface training t=25194, loss=0.017535896971821785\n", - "Surface training t=25195, loss=0.014454496093094349\n", - "Surface training t=25196, loss=0.01961237285286188\n", - "Surface training t=25197, loss=0.027037326246500015\n", - "Surface training t=25198, loss=0.024677003733813763\n", - "Surface training t=25199, loss=0.024150948971509933\n", - "Surface training t=25200, loss=0.02401896845549345\n", - "Surface training t=25201, loss=0.019427917897701263\n", - "Surface training t=25202, loss=0.02543935924768448\n", - "Surface training t=25203, loss=0.027842020615935326\n", - "Surface training t=25204, loss=0.019219581969082355\n", - "Surface training t=25205, loss=0.017239635810256004\n", - "Surface training t=25206, loss=0.024148655124008656\n", - "Surface training t=25207, loss=0.019103530794382095\n", - "Surface training t=25208, loss=0.028645670041441917\n", - "Surface training t=25209, loss=0.02549688797444105\n", - "Surface training t=25210, loss=0.022293545305728912\n", - "Surface training t=25211, loss=0.01856753882020712\n", - "Surface training t=25212, loss=0.02094398532062769\n", - "Surface training t=25213, loss=0.01716485619544983\n", - "Surface training t=25214, loss=0.017659052275121212\n", - "Surface training t=25215, loss=0.020261290483176708\n", - "Surface training t=25216, loss=0.019375885371118784\n", - "Surface training t=25217, loss=0.02757772896438837\n", - "Surface training t=25218, loss=0.02818256989121437\n", - "Surface training t=25219, loss=0.03891916759312153\n", - "Surface training t=25220, loss=0.03304537013173103\n", - "Surface 
training t=25221, loss=0.04486101306974888\n", - "Surface training t=25222, loss=0.041394198313355446\n", - "Surface training t=25223, loss=0.04618752747774124\n", - "Surface training t=25224, loss=0.03345916699618101\n", - "Surface training t=25225, loss=0.03629178926348686\n", - "Surface training t=25226, loss=0.028070599772036076\n", - "Surface training t=25227, loss=0.019968608394265175\n", - "Surface training t=25228, loss=0.021526126191020012\n", - "Surface training t=25229, loss=0.01651813741773367\n", - "Surface training t=25230, loss=0.021337277255952358\n", - "Surface training t=25231, loss=0.017825962509959936\n", - "Surface training t=25232, loss=0.022247960790991783\n", - "Surface training t=25233, loss=0.027851346880197525\n", - "Surface training t=25234, loss=0.028211455792188644\n", - "Surface training t=25235, loss=0.029446623288094997\n", - "Surface training t=25236, loss=0.026458664797246456\n", - "Surface training t=25237, loss=0.022013223730027676\n", - "Surface training t=25238, loss=0.020748560316860676\n", - "Surface training t=25239, loss=0.02474132739007473\n", - "Surface training t=25240, loss=0.028772108256816864\n", - "Surface training t=25241, loss=0.01813441328704357\n", - "Surface training t=25242, loss=0.019636228680610657\n", - "Surface training t=25243, loss=0.030304275453090668\n", - "Surface training t=25244, loss=0.023656046018004417\n", - "Surface training t=25245, loss=0.04097078554332256\n", - "Surface training t=25246, loss=0.029218103736639023\n", - "Surface training t=25247, loss=0.03517906367778778\n", - "Surface training t=25248, loss=0.04239959083497524\n", - "Surface training t=25249, loss=0.030565720051527023\n", - "Surface training t=25250, loss=0.044447923079133034\n", - "Surface training t=25251, loss=0.024040463380515575\n", - "Surface training t=25252, loss=0.034559642896056175\n", - "Surface training t=25253, loss=0.027864781208336353\n", - "Surface training t=25254, loss=0.02457293588668108\n", - "Surface training t=25255, loss=0.03084256872534752\n", - "Surface training t=25256, loss=0.026973742060363293\n", - "Surface training t=25257, loss=0.0215669022873044\n", - "Surface training t=25258, loss=0.018912075087428093\n", - "Surface training t=25259, loss=0.019365016371011734\n", - "Surface training t=25260, loss=0.022187822498381138\n", - "Surface training t=25261, loss=0.023584455251693726\n", - "Surface training t=25262, loss=0.023112746886909008\n", - "Surface training t=25263, loss=0.01939390879124403\n", - "Surface training t=25264, loss=0.032768139615654945\n", - "Surface training t=25265, loss=0.021837158128619194\n", - "Surface training t=25266, loss=0.025895217433571815\n", - "Surface training t=25267, loss=0.04097312502563\n", - "Surface training t=25268, loss=0.027270134538412094\n", - "Surface training t=25269, loss=0.03349324129521847\n", - "Surface training t=25270, loss=0.027011231519281864\n", - "Surface training t=25271, loss=0.026835624128580093\n", - "Surface training t=25272, loss=0.02544598001986742\n", - "Surface training t=25273, loss=0.025224176235496998\n", - "Surface training t=25274, loss=0.02113727480173111\n", - "Surface training t=25275, loss=0.024716327898204327\n", - "Surface training t=25276, loss=0.03189728036522865\n", - "Surface training t=25277, loss=0.03435036912560463\n", - "Surface training t=25278, loss=0.025105595588684082\n", - "Surface training t=25279, loss=0.02713841199874878\n", - "Surface training t=25280, loss=0.023623740300536156\n", - "Surface training t=25281, 
loss=0.02467618603259325\n", - "Surface training t=25282, loss=0.018639013171195984\n", - "Surface training t=25283, loss=0.022206539288163185\n", - "Surface training t=25284, loss=0.04285930097103119\n", - "Surface training t=25285, loss=0.03410189040005207\n", - "Surface training t=25286, loss=0.025089802220463753\n", - "Surface training t=25287, loss=0.02891758270561695\n", - "Surface training t=25288, loss=0.03714473731815815\n", - "Surface training t=25289, loss=0.02310357242822647\n", - "Surface training t=25290, loss=0.028758099302649498\n", - "Surface training t=25291, loss=0.02701034676283598\n", - "Surface training t=25292, loss=0.028770742937922478\n", - "Surface training t=25293, loss=0.030583580955863\n", - "Surface training t=25294, loss=0.03168867714703083\n", - "Surface training t=25295, loss=0.05614188313484192\n", - "Surface training t=25296, loss=0.04087523929774761\n", - "Surface training t=25297, loss=0.03444763086736202\n", - "Surface training t=25298, loss=0.03887521103024483\n", - "Surface training t=25299, loss=0.02595620509237051\n", - "Surface training t=25300, loss=0.029369293712079525\n", - "Surface training t=25301, loss=0.025886310264468193\n", - "Surface training t=25302, loss=0.035361915826797485\n", - "Surface training t=25303, loss=0.02418292174115777\n", - "Surface training t=25304, loss=0.0398100521415472\n", - "Surface training t=25305, loss=0.022211583331227303\n", - "Surface training t=25306, loss=0.022218854166567326\n", - "Surface training t=25307, loss=0.023898951709270477\n", - "Surface training t=25308, loss=0.02071616519242525\n", - "Surface training t=25309, loss=0.033226373605430126\n", - "Surface training t=25310, loss=0.02822334785014391\n", - "Surface training t=25311, loss=0.02745882421731949\n", - "Surface training t=25312, loss=0.02448983769863844\n", - "Surface training t=25313, loss=0.024676882661879063\n", - "Surface training t=25314, loss=0.0207015136256814\n", - "Surface training t=25315, loss=0.023466600105166435\n", - "Surface training t=25316, loss=0.019991434179246426\n", - "Surface training t=25317, loss=0.02026380691677332\n", - "Surface training t=25318, loss=0.01864876039326191\n", - "Surface training t=25319, loss=0.01706516556441784\n", - "Surface training t=25320, loss=0.015618091449141502\n", - "Surface training t=25321, loss=0.016106833703815937\n", - "Surface training t=25322, loss=0.023858544416725636\n", - "Surface training t=25323, loss=0.022884592413902283\n", - "Surface training t=25324, loss=0.028765934519469738\n", - "Surface training t=25325, loss=0.03312401659786701\n", - "Surface training t=25326, loss=0.0296213086694479\n", - "Surface training t=25327, loss=0.02425203938037157\n", - "Surface training t=25328, loss=0.034160676412284374\n", - "Surface training t=25329, loss=0.02242820430546999\n", - "Surface training t=25330, loss=0.027040266431868076\n", - "Surface training t=25331, loss=0.02337533514946699\n", - "Surface training t=25332, loss=0.02109189983457327\n", - "Surface training t=25333, loss=0.024128173477947712\n", - "Surface training t=25334, loss=0.022954806685447693\n", - "Surface training t=25335, loss=0.022748545743525028\n", - "Surface training t=25336, loss=0.019846762530505657\n", - "Surface training t=25337, loss=0.016054854728281498\n", - "Surface training t=25338, loss=0.01996555458754301\n", - "Surface training t=25339, loss=0.025384049862623215\n", - "Surface training t=25340, loss=0.02546792011708021\n", - "Surface training t=25341, loss=0.024518187157809734\n", - "Surface training 
t=25342, loss=0.02837597392499447\n", - "Surface training t=25343, loss=0.02596965618431568\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=25344, loss=0.03298359178006649\n", - "Surface training t=25345, loss=0.03824907820671797\n", - "Surface training t=25346, loss=0.03316009324043989\n", - "Surface training t=25347, loss=0.039894500747323036\n", - "Surface training t=25348, loss=0.03208634816110134\n", - "Surface training t=25349, loss=0.029599588364362717\n", - "Surface training t=25350, loss=0.03098861314356327\n", - "Surface training t=25351, loss=0.03544170502573252\n", - "Surface training t=25352, loss=0.05059606768190861\n", - "Surface training t=25353, loss=0.03600303269922733\n", - "Surface training t=25354, loss=0.03560692444443703\n", - "Surface training t=25355, loss=0.03565572388470173\n", - "Surface training t=25356, loss=0.04150239750742912\n", - "Surface training t=25357, loss=0.036693694069981575\n", - "Surface training t=25358, loss=0.052349673584103584\n", - "Surface training t=25359, loss=0.03353555127978325\n", - "Surface training t=25360, loss=0.03690676391124725\n", - "Surface training t=25361, loss=0.05987539142370224\n", - "Surface training t=25362, loss=0.0360682737082243\n", - "Surface training t=25363, loss=0.03507891297340393\n", - "Surface training t=25364, loss=0.031086232513189316\n", - "Surface training t=25365, loss=0.03125259932130575\n", - "Surface training t=25366, loss=0.03324167989194393\n", - "Surface training t=25367, loss=0.03968166187405586\n", - "Surface training t=25368, loss=0.04318119026720524\n", - "Surface training t=25369, loss=0.02999412827193737\n", - "Surface training t=25370, loss=0.03282505087554455\n", - "Surface training t=25371, loss=0.02926436811685562\n", - "Surface training t=25372, loss=0.0225664796307683\n", - "Surface training t=25373, loss=0.021740569733083248\n", - "Surface training t=25374, loss=0.02327120676636696\n", - "Surface training t=25375, loss=0.02084483439102769\n", - "Surface training t=25376, loss=0.027673578821122646\n", - "Surface training t=25377, loss=0.031822058372199535\n", - "Surface training t=25378, loss=0.02548348531126976\n", - "Surface training t=25379, loss=0.020743221044540405\n", - "Surface training t=25380, loss=0.03557993099093437\n", - "Surface training t=25381, loss=0.024896481074392796\n", - "Surface training t=25382, loss=0.027888654731214046\n", - "Surface training t=25383, loss=0.025239921174943447\n", - "Surface training t=25384, loss=0.023156654089689255\n", - "Surface training t=25385, loss=0.023656845092773438\n", - "Surface training t=25386, loss=0.017361332662403584\n", - "Surface training t=25387, loss=0.019802022259682417\n", - "Surface training t=25388, loss=0.022469954565167427\n", - "Surface training t=25389, loss=0.01881337631493807\n", - "Surface training t=25390, loss=0.020369185134768486\n", - "Surface training t=25391, loss=0.030222161673009396\n", - "Surface training t=25392, loss=0.02187006175518036\n", - "Surface training t=25393, loss=0.03257958870381117\n", - "Surface training t=25394, loss=0.0236892718821764\n", - "Surface training t=25395, loss=0.026694761589169502\n", - "Surface training t=25396, loss=0.026097135618329048\n", - "Surface training t=25397, loss=0.02906861249357462\n", - "Surface training t=25398, loss=0.02899215929210186\n", - "Surface training t=25399, loss=0.018240297213196754\n", - "Surface training t=25400, loss=0.02274729125201702\n", - "Surface training t=25401, loss=0.025622934103012085\n", - 
"Surface training t=25402, loss=0.025635752826929092\n", - "Surface training t=25403, loss=0.026149955578148365\n", - "Surface training t=25404, loss=0.02687913691624999\n", - "Surface training t=25405, loss=0.03738403879106045\n", - "Surface training t=25406, loss=0.036269038915634155\n", - "Surface training t=25407, loss=0.036629851907491684\n", - "Surface training t=25408, loss=0.030756326392292976\n", - "Surface training t=25409, loss=0.02965926006436348\n", - "Surface training t=25410, loss=0.028808724135160446\n", - "Surface training t=25411, loss=0.03624962270259857\n", - "Surface training t=25412, loss=0.021395171992480755\n", - "Surface training t=25413, loss=0.02090608887374401\n", - "Surface training t=25414, loss=0.022198382765054703\n", - "Surface training t=25415, loss=0.03199412859976292\n", - "Surface training t=25416, loss=0.024884921498596668\n", - "Surface training t=25417, loss=0.023118619807064533\n", - "Surface training t=25418, loss=0.018014593049883842\n", - "Surface training t=25419, loss=0.02495306357741356\n", - "Surface training t=25420, loss=0.02793180476874113\n", - "Surface training t=25421, loss=0.02792929206043482\n", - "Surface training t=25422, loss=0.027494169771671295\n", - "Surface training t=25423, loss=0.026479844003915787\n", - "Surface training t=25424, loss=0.02598282601684332\n", - "Surface training t=25425, loss=0.020427259616553783\n", - "Surface training t=25426, loss=0.025436257012188435\n", - "Surface training t=25427, loss=0.021094467490911484\n", - "Surface training t=25428, loss=0.02750188671052456\n", - "Surface training t=25429, loss=0.02390916272997856\n", - "Surface training t=25430, loss=0.021736664697527885\n", - "Surface training t=25431, loss=0.02232437115162611\n", - "Surface training t=25432, loss=0.024306770414114\n", - "Surface training t=25433, loss=0.021335316821932793\n", - "Surface training t=25434, loss=0.02082967199385166\n", - "Surface training t=25435, loss=0.01882214331999421\n", - "Surface training t=25436, loss=0.022589463740587234\n", - "Surface training t=25437, loss=0.02045197505503893\n", - "Surface training t=25438, loss=0.017180009745061398\n", - "Surface training t=25439, loss=0.02642266359180212\n", - "Surface training t=25440, loss=0.018608233891427517\n", - "Surface training t=25441, loss=0.02590202074497938\n", - "Surface training t=25442, loss=0.02475468721240759\n", - "Surface training t=25443, loss=0.022605844773352146\n", - "Surface training t=25444, loss=0.025268860161304474\n", - "Surface training t=25445, loss=0.027427264489233494\n", - "Surface training t=25446, loss=0.03225110564380884\n", - "Surface training t=25447, loss=0.02536468580365181\n", - "Surface training t=25448, loss=0.022383825853466988\n", - "Surface training t=25449, loss=0.02490629442036152\n", - "Surface training t=25450, loss=0.02250690758228302\n", - "Surface training t=25451, loss=0.029491757974028587\n", - "Surface training t=25452, loss=0.02333789598196745\n", - "Surface training t=25453, loss=0.02593341562896967\n", - "Surface training t=25454, loss=0.022083666175603867\n", - "Surface training t=25455, loss=0.023399380035698414\n", - "Surface training t=25456, loss=0.02434423565864563\n", - "Surface training t=25457, loss=0.02077268622815609\n", - "Surface training t=25458, loss=0.021512404084205627\n", - "Surface training t=25459, loss=0.02384368423372507\n", - "Surface training t=25460, loss=0.0185659471899271\n", - "Surface training t=25461, loss=0.015866960398852825\n", - "Surface training t=25462, 
loss=0.017460248433053493\n", - "Surface training t=25463, loss=0.023875465616583824\n", - "Surface training t=25464, loss=0.019545862451195717\n", - "Surface training t=25465, loss=0.02314182184636593\n", - "Surface training t=25466, loss=0.01971027534455061\n", - "Surface training t=25467, loss=0.024747868068516254\n", - "Surface training t=25468, loss=0.018303081393241882\n", - "Surface training t=25469, loss=0.01747992541640997\n", - "Surface training t=25470, loss=0.021822000853717327\n", - "Surface training t=25471, loss=0.022298868745565414\n", - "Surface training t=25472, loss=0.023899272084236145\n", - "Surface training t=25473, loss=0.019775761757045984\n", - "Surface training t=25474, loss=0.023884018883109093\n", - "Surface training t=25475, loss=0.018828387837857008\n", - "Surface training t=25476, loss=0.021722599864006042\n", - "Surface training t=25477, loss=0.017709466628730297\n", - "Surface training t=25478, loss=0.015512581914663315\n", - "Surface training t=25479, loss=0.026035979390144348\n", - "Surface training t=25480, loss=0.02758148033171892\n", - "Surface training t=25481, loss=0.021494846791028976\n", - "Surface training t=25482, loss=0.029958650469779968\n", - "Surface training t=25483, loss=0.026272490620613098\n", - "Surface training t=25484, loss=0.019159489311277866\n", - "Surface training t=25485, loss=0.02605881541967392\n", - "Surface training t=25486, loss=0.023144054226577282\n", - "Surface training t=25487, loss=0.02023387234658003\n", - "Surface training t=25488, loss=0.020639758557081223\n", - "Surface training t=25489, loss=0.01858506351709366\n", - "Surface training t=25490, loss=0.02723813895136118\n", - "Surface training t=25491, loss=0.02097827009856701\n", - "Surface training t=25492, loss=0.019504106603562832\n", - "Surface training t=25493, loss=0.021233647130429745\n", - "Surface training t=25494, loss=0.021209697239100933\n", - "Surface training t=25495, loss=0.023064153268933296\n", - "Surface training t=25496, loss=0.017380270175635815\n", - "Surface training t=25497, loss=0.021832664497196674\n", - "Surface training t=25498, loss=0.01851908303797245\n", - "Surface training t=25499, loss=0.02232729271054268\n", - "Surface training t=25500, loss=0.028324385173618793\n", - "Surface training t=25501, loss=0.040806327015161514\n", - "Surface training t=25502, loss=0.030697082169353962\n", - "Surface training t=25503, loss=0.0287933386862278\n", - "Surface training t=25504, loss=0.02221822924911976\n", - "Surface training t=25505, loss=0.017983097583055496\n", - "Surface training t=25506, loss=0.022870387881994247\n", - "Surface training t=25507, loss=0.022605673409998417\n", - "Surface training t=25508, loss=0.024411250837147236\n", - "Surface training t=25509, loss=0.02581708785146475\n", - "Surface training t=25510, loss=0.025957980193197727\n", - "Surface training t=25511, loss=0.023692253977060318\n", - "Surface training t=25512, loss=0.0203227736055851\n", - "Surface training t=25513, loss=0.01946657244116068\n", - "Surface training t=25514, loss=0.01847468689084053\n", - "Surface training t=25515, loss=0.016117084305733442\n", - "Surface training t=25516, loss=0.018165182322263718\n", - "Surface training t=25517, loss=0.021646492183208466\n", - "Surface training t=25518, loss=0.01864696852862835\n", - "Surface training t=25519, loss=0.019357748329639435\n", - "Surface training t=25520, loss=0.018488282337784767\n", - "Surface training t=25521, loss=0.01830611750483513\n", - "Surface training t=25522, loss=0.02078997064381838\n", - 
"Surface training t=25523, loss=0.01984356064349413\n", - "Surface training t=25524, loss=0.02024283166974783\n", - "Surface training t=25525, loss=0.02029405813664198\n", - "Surface training t=25526, loss=0.020867187529802322\n", - "Surface training t=25527, loss=0.02561183273792267\n", - "Surface training t=25528, loss=0.03047313168644905\n", - "Surface training t=25529, loss=0.023161543533205986\n", - "Surface training t=25530, loss=0.024968989193439484\n", - "Surface training t=25531, loss=0.02417956478893757\n", - "Surface training t=25532, loss=0.01975651178508997\n", - "Surface training t=25533, loss=0.02217080444097519\n", - "Surface training t=25534, loss=0.029018192552030087\n", - "Surface training t=25535, loss=0.022046837024390697\n", - "Surface training t=25536, loss=0.02128061093389988\n", - "Surface training t=25537, loss=0.02415867242962122\n", - "Surface training t=25538, loss=0.040521303191781044\n", - "Surface training t=25539, loss=0.022793828509747982\n", - "Surface training t=25540, loss=0.036739081144332886\n", - "Surface training t=25541, loss=0.024858810007572174\n", - "Surface training t=25542, loss=0.02414438035339117\n", - "Surface training t=25543, loss=0.029531102627515793\n", - "Surface training t=25544, loss=0.029156535863876343\n", - "Surface training t=25545, loss=0.02812040224671364\n", - "Surface training t=25546, loss=0.029483048245310783\n", - "Surface training t=25547, loss=0.026278321631252766\n", - "Surface training t=25548, loss=0.02387564815580845\n", - "Surface training t=25549, loss=0.018992948345839977\n", - "Surface training t=25550, loss=0.020966523326933384\n", - "Surface training t=25551, loss=0.020843081176280975\n", - "Surface training t=25552, loss=0.020757521502673626\n", - "Surface training t=25553, loss=0.021523275412619114\n", - "Surface training t=25554, loss=0.020537178963422775\n", - "Surface training t=25555, loss=0.02028775028884411\n", - "Surface training t=25556, loss=0.015984296798706055\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=25557, loss=0.014315573498606682\n", - "Surface training t=25558, loss=0.01612668763846159\n", - "Surface training t=25559, loss=0.021890953183174133\n", - "Surface training t=25560, loss=0.01848017703741789\n", - "Surface training t=25561, loss=0.021947551518678665\n", - "Surface training t=25562, loss=0.022206070832908154\n", - "Surface training t=25563, loss=0.0259588910266757\n", - "Surface training t=25564, loss=0.02835606224834919\n", - "Surface training t=25565, loss=0.023410013876855373\n", - "Surface training t=25566, loss=0.023275159299373627\n", - "Surface training t=25567, loss=0.02227958384901285\n", - "Surface training t=25568, loss=0.020934651605784893\n", - "Surface training t=25569, loss=0.01764390943571925\n", - "Surface training t=25570, loss=0.023236106149852276\n", - "Surface training t=25571, loss=0.01970903854817152\n", - "Surface training t=25572, loss=0.020893815904855728\n", - "Surface training t=25573, loss=0.017892081756144762\n", - "Surface training t=25574, loss=0.017896133475005627\n", - "Surface training t=25575, loss=0.01321687689051032\n", - "Surface training t=25576, loss=0.017418858595192432\n", - "Surface training t=25577, loss=0.01905258186161518\n", - "Surface training t=25578, loss=0.01753958873450756\n", - "Surface training t=25579, loss=0.01943167019635439\n", - "Surface training t=25580, loss=0.01749364659190178\n", - "Surface training t=25581, loss=0.016131241340190172\n", - "Surface training t=25582, 
loss=0.015003554057329893\n", - "Surface training t=25583, loss=0.01496450137346983\n", - "Surface training t=25584, loss=0.02345181256532669\n", - "Surface training t=25585, loss=0.02377036027610302\n", - "Surface training t=25586, loss=0.022251233458518982\n", - "Surface training t=25587, loss=0.021383737213909626\n", - "Surface training t=25588, loss=0.027492529712617397\n", - "Surface training t=25589, loss=0.02917314227670431\n", - "Surface training t=25590, loss=0.03152989502996206\n", - "Surface training t=25591, loss=0.028407875448465347\n", - "Surface training t=25592, loss=0.026817694306373596\n", - "Surface training t=25593, loss=0.03097974695265293\n", - "Surface training t=25594, loss=0.022484284825623035\n", - "Surface training t=25595, loss=0.028138594701886177\n", - "Surface training t=25596, loss=0.02560642920434475\n", - "Surface training t=25597, loss=0.025903555564582348\n", - "Surface training t=25598, loss=0.020479360595345497\n", - "Surface training t=25599, loss=0.018132697325199842\n", - "Surface training t=25600, loss=0.02283062320202589\n", - "Surface training t=25601, loss=0.02053021639585495\n", - "Surface training t=25602, loss=0.025156592950224876\n", - "Surface training t=25603, loss=0.022927354089915752\n", - "Surface training t=25604, loss=0.028237721882760525\n", - "Surface training t=25605, loss=0.02135831955820322\n", - "Surface training t=25606, loss=0.0217271251603961\n", - "Surface training t=25607, loss=0.017092383466660976\n", - "Surface training t=25608, loss=0.022182360291481018\n", - "Surface training t=25609, loss=0.023878020234405994\n", - "Surface training t=25610, loss=0.021230305545032024\n", - "Surface training t=25611, loss=0.021035240963101387\n", - "Surface training t=25612, loss=0.02273527905344963\n", - "Surface training t=25613, loss=0.01779570197686553\n", - "Surface training t=25614, loss=0.018905431032180786\n", - "Surface training t=25615, loss=0.01967084128409624\n", - "Surface training t=25616, loss=0.016034326516091824\n", - "Surface training t=25617, loss=0.020962018985301256\n", - "Surface training t=25618, loss=0.01691754348576069\n", - "Surface training t=25619, loss=0.019109838642179966\n", - "Surface training t=25620, loss=0.0191449336707592\n", - "Surface training t=25621, loss=0.020229751244187355\n", - "Surface training t=25622, loss=0.018750272691249847\n", - "Surface training t=25623, loss=0.02082290407270193\n", - "Surface training t=25624, loss=0.015707463026046753\n", - "Surface training t=25625, loss=0.021151194348931313\n", - "Surface training t=25626, loss=0.022644270211458206\n", - "Surface training t=25627, loss=0.014464202336966991\n", - "Surface training t=25628, loss=0.020852629095315933\n", - "Surface training t=25629, loss=0.01303118234500289\n", - "Surface training t=25630, loss=0.017936937510967255\n", - "Surface training t=25631, loss=0.016174550633877516\n", - "Surface training t=25632, loss=0.015807458199560642\n", - "Surface training t=25633, loss=0.01715722307562828\n", - "Surface training t=25634, loss=0.0189004042185843\n", - "Surface training t=25635, loss=0.026054115034639835\n", - "Surface training t=25636, loss=0.02441966999322176\n", - "Surface training t=25637, loss=0.023346315138041973\n", - "Surface training t=25638, loss=0.03672834113240242\n", - "Surface training t=25639, loss=0.028209397569298744\n", - "Surface training t=25640, loss=0.032068828120827675\n", - "Surface training t=25641, loss=0.03504300303757191\n", - "Surface training t=25642, loss=0.023805980570614338\n", - "Surface 
training t=25643, loss=0.042916858568787575\n", - "Surface training t=25644, loss=0.026255209930241108\n", - "Surface training t=25645, loss=0.0411812923848629\n", - "Surface training t=25646, loss=0.027211026288568974\n", - "Surface training t=25647, loss=0.03263949602842331\n", - "Surface training t=25648, loss=0.030869358219206333\n", - "Surface training t=25649, loss=0.026495045982301235\n", - "Surface training t=25650, loss=0.027359060011804104\n", - "Surface training t=25651, loss=0.03574367240071297\n", - "Surface training t=25652, loss=0.03051568567752838\n", - "Surface training t=25653, loss=0.03139944840222597\n", - "Surface training t=25654, loss=0.028580354060977697\n", - "Surface training t=25655, loss=0.034867098554968834\n", - "Surface training t=25656, loss=0.031084110960364342\n", - "Surface training t=25657, loss=0.027347720228135586\n", - "Surface training t=25658, loss=0.018874807748943567\n", - "Surface training t=25659, loss=0.019718648865818977\n", - "Surface training t=25660, loss=0.016180839389562607\n", - "Surface training t=25661, loss=0.020874689798802137\n", - "Surface training t=25662, loss=0.01967284455895424\n", - "Surface training t=25663, loss=0.016101860906928778\n", - "Surface training t=25664, loss=0.02019859477877617\n", - "Surface training t=25665, loss=0.015011515468358994\n", - "Surface training t=25666, loss=0.016263131983578205\n", - "Surface training t=25667, loss=0.014143619686365128\n", - "Surface training t=25668, loss=0.024313931353390217\n", - "Surface training t=25669, loss=0.027570840902626514\n", - "Surface training t=25670, loss=0.036827925592660904\n", - "Surface training t=25671, loss=0.021663335151970387\n", - "Surface training t=25672, loss=0.029200338758528233\n", - "Surface training t=25673, loss=0.031894344836473465\n", - "Surface training t=25674, loss=0.03164403513073921\n", - "Surface training t=25675, loss=0.028630714863538742\n", - "Surface training t=25676, loss=0.0311949010938406\n", - "Surface training t=25677, loss=0.021446376107633114\n", - "Surface training t=25678, loss=0.028399810194969177\n", - "Surface training t=25679, loss=0.02657295111566782\n", - "Surface training t=25680, loss=0.023721981793642044\n", - "Surface training t=25681, loss=0.029863247647881508\n", - "Surface training t=25682, loss=0.02377215586602688\n", - "Surface training t=25683, loss=0.029873887076973915\n", - "Surface training t=25684, loss=0.028745674528181553\n", - "Surface training t=25685, loss=0.03157408349215984\n", - "Surface training t=25686, loss=0.03941971994936466\n", - "Surface training t=25687, loss=0.03277873247861862\n", - "Surface training t=25688, loss=0.03277645632624626\n", - "Surface training t=25689, loss=0.025075328536331654\n", - "Surface training t=25690, loss=0.02846369706094265\n", - "Surface training t=25691, loss=0.02909095399081707\n", - "Surface training t=25692, loss=0.028021630831062794\n", - "Surface training t=25693, loss=0.032740107737481594\n", - "Surface training t=25694, loss=0.024218272417783737\n", - "Surface training t=25695, loss=0.02904611825942993\n", - "Surface training t=25696, loss=0.03319159150123596\n", - "Surface training t=25697, loss=0.024649355560541153\n", - "Surface training t=25698, loss=0.027095087803900242\n", - "Surface training t=25699, loss=0.02372085675597191\n", - "Surface training t=25700, loss=0.026077366434037685\n", - "Surface training t=25701, loss=0.02854209393262863\n", - "Surface training t=25702, loss=0.03061431460082531\n", - "Surface training t=25703, 
loss=0.025805439800024033\n", - "Surface training t=25704, loss=0.019017992541193962\n", - "Surface training t=25705, loss=0.015651815105229616\n", - "Surface training t=25706, loss=0.018447767477482557\n", - "Surface training t=25707, loss=0.020837496034801006\n", - "Surface training t=25708, loss=0.017928894143551588\n", - "Surface training t=25709, loss=0.018953148275613785\n", - "Surface training t=25710, loss=0.031020470894873142\n", - "Surface training t=25711, loss=0.027047475799918175\n", - "Surface training t=25712, loss=0.0214694794267416\n", - "Surface training t=25713, loss=0.028320617973804474\n", - "Surface training t=25714, loss=0.02183082140982151\n", - "Surface training t=25715, loss=0.022580676712095737\n", - "Surface training t=25716, loss=0.04022762551903725\n", - "Surface training t=25717, loss=0.02509088534861803\n", - "Surface training t=25718, loss=0.024699688889086246\n", - "Surface training t=25719, loss=0.028330682776868343\n", - "Surface training t=25720, loss=0.025968116708099842\n", - "Surface training t=25721, loss=0.029273267835378647\n", - "Surface training t=25722, loss=0.033627305179834366\n", - "Surface training t=25723, loss=0.023526650853455067\n", - "Surface training t=25724, loss=0.026956576853990555\n", - "Surface training t=25725, loss=0.04067613743245602\n", - "Surface training t=25726, loss=0.033829646185040474\n", - "Surface training t=25727, loss=0.03965052217245102\n", - "Surface training t=25728, loss=0.02789386175572872\n", - "Surface training t=25729, loss=0.02985723689198494\n", - "Surface training t=25730, loss=0.028523455373942852\n", - "Surface training t=25731, loss=0.041855594143271446\n", - "Surface training t=25732, loss=0.029739927500486374\n", - "Surface training t=25733, loss=0.034604890272021294\n", - "Surface training t=25734, loss=0.03908023051917553\n", - "Surface training t=25735, loss=0.047196704894304276\n", - "Surface training t=25736, loss=0.03598032891750336\n", - "Surface training t=25737, loss=0.02657417394220829\n", - "Surface training t=25738, loss=0.027052052319049835\n", - "Surface training t=25739, loss=0.028059616684913635\n", - "Surface training t=25740, loss=0.0324283791705966\n", - "Surface training t=25741, loss=0.049971867352724075\n", - "Surface training t=25742, loss=0.02820081263780594\n", - "Surface training t=25743, loss=0.02999392058700323\n", - "Surface training t=25744, loss=0.03343770559877157\n", - "Surface training t=25745, loss=0.03008808195590973\n", - "Surface training t=25746, loss=0.027008467353880405\n", - "Surface training t=25747, loss=0.02619200199842453\n", - "Surface training t=25748, loss=0.024612804874777794\n", - "Surface training t=25749, loss=0.023992321453988552\n", - "Surface training t=25750, loss=0.027506329119205475\n", - "Surface training t=25751, loss=0.0285413796082139\n", - "Surface training t=25752, loss=0.034019202925264835\n", - "Surface training t=25753, loss=0.04575919173657894\n", - "Surface training t=25754, loss=0.03508821967989206\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=25755, loss=0.042055217549204826\n", - "Surface training t=25756, loss=0.04151364043354988\n", - "Surface training t=25757, loss=0.03484439663589001\n", - "Surface training t=25758, loss=0.0353170670568943\n", - "Surface training t=25759, loss=0.033110687509179115\n", - "Surface training t=25760, loss=0.03591453842818737\n", - "Surface training t=25761, loss=0.025735403411090374\n", - "Surface training t=25762, loss=0.040924934670329094\n", 
- "Surface training t=25763, loss=0.025791621766984463\n", - "Surface training t=25764, loss=0.031849407590925694\n", - "Surface training t=25765, loss=0.023546290583908558\n", - "Surface training t=25766, loss=0.018439004663378\n", - "Surface training t=25767, loss=0.020161107182502747\n", - "Surface training t=25768, loss=0.023678037337958813\n", - "Surface training t=25769, loss=0.021492482163012028\n", - "Surface training t=25770, loss=0.02138871606439352\n", - "Surface training t=25771, loss=0.017157288268208504\n", - "Surface training t=25772, loss=0.020328804850578308\n", - "Surface training t=25773, loss=0.018006769940257072\n", - "Surface training t=25774, loss=0.01502757053822279\n", - "Surface training t=25775, loss=0.01688358746469021\n", - "Surface training t=25776, loss=0.015414289198815823\n", - "Surface training t=25777, loss=0.017363756895065308\n", - "Surface training t=25778, loss=0.020932476967573166\n", - "Surface training t=25779, loss=0.019721793942153454\n", - "Surface training t=25780, loss=0.02064493205398321\n", - "Surface training t=25781, loss=0.01707193534821272\n", - "Surface training t=25782, loss=0.016764785163104534\n", - "Surface training t=25783, loss=0.026269464753568172\n", - "Surface training t=25784, loss=0.02419128641486168\n", - "Surface training t=25785, loss=0.02101953886449337\n", - "Surface training t=25786, loss=0.0218176431953907\n", - "Surface training t=25787, loss=0.02575451973825693\n", - "Surface training t=25788, loss=0.019716902635991573\n", - "Surface training t=25789, loss=0.024115961510688066\n", - "Surface training t=25790, loss=0.027180246077477932\n", - "Surface training t=25791, loss=0.025179771706461906\n", - "Surface training t=25792, loss=0.02491340134292841\n", - "Surface training t=25793, loss=0.039168573915958405\n", - "Surface training t=25794, loss=0.027209742926061153\n", - "Surface training t=25795, loss=0.0351690910756588\n", - "Surface training t=25796, loss=0.030768584460020065\n", - "Surface training t=25797, loss=0.041939038783311844\n", - "Surface training t=25798, loss=0.03255430981516838\n", - "Surface training t=25799, loss=0.026534587144851685\n", - "Surface training t=25800, loss=0.024229343980550766\n", - "Surface training t=25801, loss=0.020519412122666836\n", - "Surface training t=25802, loss=0.02352994680404663\n", - "Surface training t=25803, loss=0.02590438351035118\n", - "Surface training t=25804, loss=0.02471732720732689\n", - "Surface training t=25805, loss=0.022421457804739475\n", - "Surface training t=25806, loss=0.020411581732332706\n", - "Surface training t=25807, loss=0.019580595195293427\n", - "Surface training t=25808, loss=0.02197412122040987\n", - "Surface training t=25809, loss=0.02306864783167839\n", - "Surface training t=25810, loss=0.02291968185454607\n", - "Surface training t=25811, loss=0.028047138825058937\n", - "Surface training t=25812, loss=0.030098695307970047\n", - "Surface training t=25813, loss=0.025768358260393143\n", - "Surface training t=25814, loss=0.023785254918038845\n", - "Surface training t=25815, loss=0.026078219525516033\n", - "Surface training t=25816, loss=0.027577068656682968\n", - "Surface training t=25817, loss=0.024451488628983498\n", - "Surface training t=25818, loss=0.029979621060192585\n", - "Surface training t=25819, loss=0.026531429961323738\n", - "Surface training t=25820, loss=0.025793377310037613\n", - "Surface training t=25821, loss=0.0223050806671381\n", - "Surface training t=25822, loss=0.028866608627140522\n", - "Surface training t=25823, 
loss=0.0271600978448987\n", - "Surface training t=25824, loss=0.027918011881411076\n", - "Surface training t=25825, loss=0.023359508253633976\n", - "Surface training t=25826, loss=0.01964739989489317\n", - "Surface training t=25827, loss=0.016594878863543272\n", - "Surface training t=25828, loss=0.015420346986502409\n", - "Surface training t=25829, loss=0.017191519029438496\n", - "Surface training t=25830, loss=0.016542044933885336\n", - "Surface training t=25831, loss=0.01915183151140809\n", - "Surface training t=25832, loss=0.016793442890048027\n", - "Surface training t=25833, loss=0.020627806894481182\n", - "Surface training t=25834, loss=0.01214326499029994\n", - "Surface training t=25835, loss=0.016248243860900402\n", - "Surface training t=25836, loss=0.015900337137281895\n", - "Surface training t=25837, loss=0.01682691741734743\n", - "Surface training t=25838, loss=0.0153785296715796\n", - "Surface training t=25839, loss=0.013713293708860874\n", - "Surface training t=25840, loss=0.017353211995214224\n", - "Surface training t=25841, loss=0.02137681283056736\n", - "Surface training t=25842, loss=0.025617815554142\n", - "Surface training t=25843, loss=0.026692445389926434\n", - "Surface training t=25844, loss=0.030260998755693436\n", - "Surface training t=25845, loss=0.03345578908920288\n", - "Surface training t=25846, loss=0.025400206446647644\n", - "Surface training t=25847, loss=0.03410713002085686\n", - "Surface training t=25848, loss=0.04467006400227547\n", - "Surface training t=25849, loss=0.03199844900518656\n", - "Surface training t=25850, loss=0.030027889646589756\n", - "Surface training t=25851, loss=0.021163627970963717\n", - "Surface training t=25852, loss=0.02290577907115221\n", - "Surface training t=25853, loss=0.022346924059093\n", - "Surface training t=25854, loss=0.022930320352315903\n", - "Surface training t=25855, loss=0.027220742776989937\n", - "Surface training t=25856, loss=0.022161815315485\n", - "Surface training t=25857, loss=0.023030360229313374\n", - "Surface training t=25858, loss=0.027997354045510292\n", - "Surface training t=25859, loss=0.03440132178366184\n", - "Surface training t=25860, loss=0.024673993699252605\n", - "Surface training t=25861, loss=0.035555871203541756\n", - "Surface training t=25862, loss=0.024024835787713528\n", - "Surface training t=25863, loss=0.02922958228737116\n", - "Surface training t=25864, loss=0.019671939313411713\n", - "Surface training t=25865, loss=0.021093294024467468\n", - "Surface training t=25866, loss=0.033230604603886604\n", - "Surface training t=25867, loss=0.03038155473768711\n", - "Surface training t=25868, loss=0.024577373638749123\n", - "Surface training t=25869, loss=0.02634417451918125\n", - "Surface training t=25870, loss=0.026971131563186646\n", - "Surface training t=25871, loss=0.024054111912846565\n", - "Surface training t=25872, loss=0.03470396250486374\n", - "Surface training t=25873, loss=0.024436277337372303\n", - "Surface training t=25874, loss=0.028557831421494484\n", - "Surface training t=25875, loss=0.021174801513552666\n", - "Surface training t=25876, loss=0.021003500558435917\n", - "Surface training t=25877, loss=0.023579152300953865\n", - "Surface training t=25878, loss=0.017483381554484367\n", - "Surface training t=25879, loss=0.01855431403964758\n", - "Surface training t=25880, loss=0.015838665422052145\n", - "Surface training t=25881, loss=0.01472994964569807\n", - "Surface training t=25882, loss=0.015838949009776115\n", - "Surface training t=25883, loss=0.018843717873096466\n", - "Surface 
training t=25884, loss=0.01794319087639451\n", - "Surface training t=25885, loss=0.023054111748933792\n", - "Surface training t=25886, loss=0.025272672064602375\n", - "Surface training t=25887, loss=0.014591682236641645\n", - "Surface training t=25888, loss=0.01580412918701768\n", - "Surface training t=25889, loss=0.020290233194828033\n", - "Surface training t=25890, loss=0.021193443797528744\n", - "Surface training t=25891, loss=0.022514047101140022\n", - "Surface training t=25892, loss=0.02886094432324171\n", - "Surface training t=25893, loss=0.02190515212714672\n", - "Surface training t=25894, loss=0.029947366565465927\n", - "Surface training t=25895, loss=0.029167314991354942\n", - "Surface training t=25896, loss=0.03379753604531288\n", - "Surface training t=25897, loss=0.03229862358421087\n", - "Surface training t=25898, loss=0.02779268939048052\n", - "Surface training t=25899, loss=0.02172977663576603\n", - "Surface training t=25900, loss=0.029271992854773998\n", - "Surface training t=25901, loss=0.02707899734377861\n", - "Surface training t=25902, loss=0.04383968561887741\n", - "Surface training t=25903, loss=0.028450962156057358\n", - "Surface training t=25904, loss=0.02657162956893444\n", - "Surface training t=25905, loss=0.03224153257906437\n", - "Surface training t=25906, loss=0.034783635288476944\n", - "Surface training t=25907, loss=0.027616648003458977\n", - "Surface training t=25908, loss=0.03565682843327522\n", - "Surface training t=25909, loss=0.04193788766860962\n", - "Surface training t=25910, loss=0.03955176658928394\n", - "Surface training t=25911, loss=0.036827901378273964\n", - "Surface training t=25912, loss=0.03650677762925625\n", - "Surface training t=25913, loss=0.03152444027364254\n", - "Surface training t=25914, loss=0.028620329685509205\n", - "Surface training t=25915, loss=0.027594372630119324\n", - "Surface training t=25916, loss=0.02635864820331335\n", - "Surface training t=25917, loss=0.04526039771735668\n", - "Surface training t=25918, loss=0.025847584940493107\n", - "Surface training t=25919, loss=0.024319520220160484\n", - "Surface training t=25920, loss=0.02910755481570959\n", - "Surface training t=25921, loss=0.02818125579506159\n", - "Surface training t=25922, loss=0.023617202416062355\n", - "Surface training t=25923, loss=0.024659311398863792\n", - "Surface training t=25924, loss=0.03897004574537277\n", - "Surface training t=25925, loss=0.030221796594560146\n", - "Surface training t=25926, loss=0.0251213563606143\n", - "Surface training t=25927, loss=0.024479208514094353\n", - "Surface training t=25928, loss=0.018052772618830204\n", - "Surface training t=25929, loss=0.016779527068138123\n", - "Surface training t=25930, loss=0.023820807226002216\n", - "Surface training t=25931, loss=0.03652952425181866\n", - "Surface training t=25932, loss=0.024695693515241146\n", - "Surface training t=25933, loss=0.021255414001643658\n", - "Surface training t=25934, loss=0.020749898627400398\n", - "Surface training t=25935, loss=0.022791799157857895\n", - "Surface training t=25936, loss=0.024015425704419613\n", - "Surface training t=25937, loss=0.021082903258502483\n", - "Surface training t=25938, loss=0.021439913660287857\n", - "Surface training t=25939, loss=0.021515244618058205\n", - "Surface training t=25940, loss=0.023001331835985184\n", - "Surface training t=25941, loss=0.03349905647337437\n", - "Surface training t=25942, loss=0.03669280931353569\n", - "Surface training t=25943, loss=0.034573767334222794\n", - "Surface training t=25944, 
- [... deleted Jupyter-notebook stdout stream cells: repeated training-log lines of the form "Surface training t=<step>, loss=<value>", covering steps t=25945 through t=27207 ...]
loss=0.016732463147491217\n", - "Surface training t=27208, loss=0.018209893256425858\n", - "Surface training t=27209, loss=0.01611726824194193\n", - "Surface training t=27210, loss=0.017265483736991882\n", - "Surface training t=27211, loss=0.01512649841606617\n", - "Surface training t=27212, loss=0.01800432987511158\n", - "Surface training t=27213, loss=0.01794601511210203\n", - "Surface training t=27214, loss=0.019734550267457962\n", - "Surface training t=27215, loss=0.013187226839363575\n", - "Surface training t=27216, loss=0.018080600537359715\n", - "Surface training t=27217, loss=0.016337723471224308\n", - "Surface training t=27218, loss=0.016613516956567764\n", - "Surface training t=27219, loss=0.021035038866102695\n", - "Surface training t=27220, loss=0.021095504984259605\n", - "Surface training t=27221, loss=0.026356272399425507\n", - "Surface training t=27222, loss=0.022814905270934105\n", - "Surface training t=27223, loss=0.024025672115385532\n", - "Surface training t=27224, loss=0.022673881612718105\n", - "Surface training t=27225, loss=0.019073612056672573\n", - "Surface training t=27226, loss=0.019040411338210106\n", - "Surface training t=27227, loss=0.015529044903814793\n", - "Surface training t=27228, loss=0.017277750186622143\n", - "Surface training t=27229, loss=0.017972830682992935\n", - "Surface training t=27230, loss=0.014868848491460085\n", - "Surface training t=27231, loss=0.014374821912497282\n", - "Surface training t=27232, loss=0.01523851789534092\n", - "Surface training t=27233, loss=0.018266598228365183\n", - "Surface training t=27234, loss=0.0240761898458004\n", - "Surface training t=27235, loss=0.03446296229958534\n", - "Surface training t=27236, loss=0.02699813712388277\n", - "Surface training t=27237, loss=0.025961737148463726\n", - "Surface training t=27238, loss=0.028582402504980564\n", - "Surface training t=27239, loss=0.021734373178333044\n", - "Surface training t=27240, loss=0.0197625532746315\n", - "Surface training t=27241, loss=0.02516904566437006\n", - "Surface training t=27242, loss=0.02620484121143818\n", - "Surface training t=27243, loss=0.03290400467813015\n", - "Surface training t=27244, loss=0.028720738366246223\n", - "Surface training t=27245, loss=0.02979352604597807\n", - "Surface training t=27246, loss=0.037352483719587326\n", - "Surface training t=27247, loss=0.032813175581395626\n", - "Surface training t=27248, loss=0.036735476925969124\n", - "Surface training t=27249, loss=0.03734312020242214\n", - "Surface training t=27250, loss=0.032896898686885834\n", - "Surface training t=27251, loss=0.03910777345299721\n", - "Surface training t=27252, loss=0.019638829864561558\n", - "Surface training t=27253, loss=0.035289518535137177\n", - "Surface training t=27254, loss=0.03807055205106735\n", - "Surface training t=27255, loss=0.022276100236922503\n", - "Surface training t=27256, loss=0.030137816444039345\n", - "Surface training t=27257, loss=0.025025762617588043\n", - "Surface training t=27258, loss=0.01998894475400448\n", - "Surface training t=27259, loss=0.021341828629374504\n", - "Surface training t=27260, loss=0.017620479222387075\n", - "Surface training t=27261, loss=0.025737214833498\n", - "Surface training t=27262, loss=0.029222617857158184\n", - "Surface training t=27263, loss=0.030136089771986008\n", - "Surface training t=27264, loss=0.025469188578426838\n", - "Surface training t=27265, loss=0.028582319617271423\n", - "Surface training t=27266, loss=0.030864007771015167\n", - "Surface training t=27267, loss=0.030758455395698547\n", - 
"Surface training t=27268, loss=0.028175896033644676\n", - "Surface training t=27269, loss=0.026287703774869442\n", - "Surface training t=27270, loss=0.026644466444849968\n", - "Surface training t=27271, loss=0.029320908710360527\n", - "Surface training t=27272, loss=0.025981377810239792\n", - "Surface training t=27273, loss=0.032636442221701145\n", - "Surface training t=27274, loss=0.03187006711959839\n", - "Surface training t=27275, loss=0.028221014887094498\n", - "Surface training t=27276, loss=0.026707978919148445\n", - "Surface training t=27277, loss=0.02341547980904579\n", - "Surface training t=27278, loss=0.017713225446641445\n", - "Surface training t=27279, loss=0.024811169132590294\n", - "Surface training t=27280, loss=0.02298583835363388\n", - "Surface training t=27281, loss=0.016271570697426796\n", - "Surface training t=27282, loss=0.014211400877684355\n", - "Surface training t=27283, loss=0.01691223355010152\n", - "Surface training t=27284, loss=0.015449874568730593\n", - "Surface training t=27285, loss=0.01979330275207758\n", - "Surface training t=27286, loss=0.01705285534262657\n", - "Surface training t=27287, loss=0.014552588574588299\n", - "Surface training t=27288, loss=0.015431524254381657\n", - "Surface training t=27289, loss=0.020025338511914015\n", - "Surface training t=27290, loss=0.02098738681524992\n", - "Surface training t=27291, loss=0.019786175806075335\n", - "Surface training t=27292, loss=0.023373683914542198\n", - "Surface training t=27293, loss=0.020261642523109913\n", - "Surface training t=27294, loss=0.025283189490437508\n", - "Surface training t=27295, loss=0.02041088044643402\n", - "Surface training t=27296, loss=0.03531981445848942\n", - "Surface training t=27297, loss=0.02708583977073431\n", - "Surface training t=27298, loss=0.023427161388099194\n", - "Surface training t=27299, loss=0.02357475832104683\n", - "Surface training t=27300, loss=0.02376740053296089\n", - "Surface training t=27301, loss=0.02591311652213335\n", - "Surface training t=27302, loss=0.024023442529141903\n", - "Surface training t=27303, loss=0.027867245487868786\n", - "Surface training t=27304, loss=0.02035303460434079\n", - "Surface training t=27305, loss=0.020118266344070435\n", - "Surface training t=27306, loss=0.027550656348466873\n", - "Surface training t=27307, loss=0.024988661520183086\n", - "Surface training t=27308, loss=0.0376509428024292\n", - "Surface training t=27309, loss=0.025312436744570732\n", - "Surface training t=27310, loss=0.024828720837831497\n", - "Surface training t=27311, loss=0.021590312477201223\n", - "Surface training t=27312, loss=0.02659896295517683\n", - "Surface training t=27313, loss=0.020634731277823448\n", - "Surface training t=27314, loss=0.02231459692120552\n", - "Surface training t=27315, loss=0.016823233105242252\n", - "Surface training t=27316, loss=0.018848552368581295\n", - "Surface training t=27317, loss=0.024995163083076477\n", - "Surface training t=27318, loss=0.017108998261392117\n", - "Surface training t=27319, loss=0.015591546893119812\n", - "Surface training t=27320, loss=0.022314743138849735\n", - "Surface training t=27321, loss=0.019374564290046692\n", - "Surface training t=27322, loss=0.022079508751630783\n", - "Surface training t=27323, loss=0.019384409300982952\n", - "Surface training t=27324, loss=0.021130884066224098\n", - "Surface training t=27325, loss=0.017984973266720772\n", - "Surface training t=27326, loss=0.017655299976468086\n", - "Surface training t=27327, loss=0.012954581528902054\n" - ] - }, - { - "name": "stdout", - 
"output_type": "stream", - "text": [ - "Surface training t=27328, loss=0.01619875244796276\n", - "Surface training t=27329, loss=0.017839260399341583\n", - "Surface training t=27330, loss=0.020781535655260086\n", - "Surface training t=27331, loss=0.02943706139922142\n", - "Surface training t=27332, loss=0.02405325509607792\n", - "Surface training t=27333, loss=0.01794891245663166\n", - "Surface training t=27334, loss=0.02379966713488102\n", - "Surface training t=27335, loss=0.02370339259505272\n", - "Surface training t=27336, loss=0.018234044313430786\n", - "Surface training t=27337, loss=0.021983017213642597\n", - "Surface training t=27338, loss=0.01302828686311841\n", - "Surface training t=27339, loss=0.017740804702043533\n", - "Surface training t=27340, loss=0.015965106431394815\n", - "Surface training t=27341, loss=0.021206701174378395\n", - "Surface training t=27342, loss=0.02883146796375513\n", - "Surface training t=27343, loss=0.021403702907264233\n", - "Surface training t=27344, loss=0.02284698188304901\n", - "Surface training t=27345, loss=0.025926075875759125\n", - "Surface training t=27346, loss=0.02639947645366192\n", - "Surface training t=27347, loss=0.02951642870903015\n", - "Surface training t=27348, loss=0.026278088800609112\n", - "Surface training t=27349, loss=0.029002871364355087\n", - "Surface training t=27350, loss=0.027995468117296696\n", - "Surface training t=27351, loss=0.031407617032527924\n", - "Surface training t=27352, loss=0.03394969366490841\n", - "Surface training t=27353, loss=0.03193740826100111\n", - "Surface training t=27354, loss=0.0259700370952487\n", - "Surface training t=27355, loss=0.028207422234117985\n", - "Surface training t=27356, loss=0.03465267736464739\n", - "Surface training t=27357, loss=0.029271120205521584\n", - "Surface training t=27358, loss=0.03198446333408356\n", - "Surface training t=27359, loss=0.038131202571094036\n", - "Surface training t=27360, loss=0.04713445343077183\n", - "Surface training t=27361, loss=0.041956715285778046\n", - "Surface training t=27362, loss=0.044734152033925056\n", - "Surface training t=27363, loss=0.02837182767689228\n", - "Surface training t=27364, loss=0.026464328169822693\n", - "Surface training t=27365, loss=0.02731501031666994\n", - "Surface training t=27366, loss=0.02794897835701704\n", - "Surface training t=27367, loss=0.030832702293992043\n", - "Surface training t=27368, loss=0.04302582889795303\n", - "Surface training t=27369, loss=0.026802537962794304\n", - "Surface training t=27370, loss=0.028618287295103073\n", - "Surface training t=27371, loss=0.024073190055787563\n", - "Surface training t=27372, loss=0.02582646068185568\n", - "Surface training t=27373, loss=0.021744253113865852\n", - "Surface training t=27374, loss=0.024660624563694\n", - "Surface training t=27375, loss=0.029963587410748005\n", - "Surface training t=27376, loss=0.023640201427042484\n", - "Surface training t=27377, loss=0.02879601437598467\n", - "Surface training t=27378, loss=0.026252874173223972\n", - "Surface training t=27379, loss=0.031232940033078194\n", - "Surface training t=27380, loss=0.031637043692171574\n", - "Surface training t=27381, loss=0.02723439410328865\n", - "Surface training t=27382, loss=0.031021817587316036\n", - "Surface training t=27383, loss=0.028660654090344906\n", - "Surface training t=27384, loss=0.020626908168196678\n", - "Surface training t=27385, loss=0.022111801430583\n", - "Surface training t=27386, loss=0.018803033977746964\n", - "Surface training t=27387, loss=0.020127364434301853\n", - 
"Surface training t=27388, loss=0.0248128529638052\n", - "Surface training t=27389, loss=0.0212382385507226\n", - "Surface training t=27390, loss=0.0154878506436944\n", - "Surface training t=27391, loss=0.021384721621870995\n", - "Surface training t=27392, loss=0.02297370508313179\n", - "Surface training t=27393, loss=0.021173156797885895\n", - "Surface training t=27394, loss=0.016623562667518854\n", - "Surface training t=27395, loss=0.021635886281728745\n", - "Surface training t=27396, loss=0.021656861528754234\n", - "Surface training t=27397, loss=0.022594140842556953\n", - "Surface training t=27398, loss=0.02175927720963955\n", - "Surface training t=27399, loss=0.024952242150902748\n", - "Surface training t=27400, loss=0.023481992073357105\n", - "Surface training t=27401, loss=0.029020175337791443\n", - "Surface training t=27402, loss=0.04553719609975815\n", - "Surface training t=27403, loss=0.03365664556622505\n", - "Surface training t=27404, loss=0.038867329247295856\n", - "Surface training t=27405, loss=0.05265112780034542\n", - "Surface training t=27406, loss=0.038602931424975395\n", - "Surface training t=27407, loss=0.042015815153717995\n", - "Surface training t=27408, loss=0.03297707438468933\n", - "Surface training t=27409, loss=0.024606063030660152\n", - "Surface training t=27410, loss=0.03064462821930647\n", - "Surface training t=27411, loss=0.024987089447677135\n", - "Surface training t=27412, loss=0.02683134377002716\n", - "Surface training t=27413, loss=0.024533954448997974\n", - "Surface training t=27414, loss=0.03144642245024443\n", - "Surface training t=27415, loss=0.0343654602766037\n", - "Surface training t=27416, loss=0.027632268145680428\n", - "Surface training t=27417, loss=0.03730032220482826\n", - "Surface training t=27418, loss=0.03257011342793703\n", - "Surface training t=27419, loss=0.03500285930931568\n", - "Surface training t=27420, loss=0.03847227990627289\n", - "Surface training t=27421, loss=0.029498745687305927\n", - "Surface training t=27422, loss=0.026977738365530968\n", - "Surface training t=27423, loss=0.027716683223843575\n", - "Surface training t=27424, loss=0.024860107339918613\n", - "Surface training t=27425, loss=0.02341560460627079\n", - "Surface training t=27426, loss=0.031268646009266376\n", - "Surface training t=27427, loss=0.026612896472215652\n", - "Surface training t=27428, loss=0.030398515053093433\n", - "Surface training t=27429, loss=0.031718840822577477\n", - "Surface training t=27430, loss=0.02487902995198965\n", - "Surface training t=27431, loss=0.03257869090884924\n", - "Surface training t=27432, loss=0.026884869672358036\n", - "Surface training t=27433, loss=0.028188947588205338\n", - "Surface training t=27434, loss=0.02367432415485382\n", - "Surface training t=27435, loss=0.021913782693445683\n", - "Surface training t=27436, loss=0.019847994670271873\n", - "Surface training t=27437, loss=0.0176002262160182\n", - "Surface training t=27438, loss=0.020040458999574184\n", - "Surface training t=27439, loss=0.018240579403936863\n", - "Surface training t=27440, loss=0.02564875688403845\n", - "Surface training t=27441, loss=0.017583515495061874\n", - "Surface training t=27442, loss=0.024038922041654587\n", - "Surface training t=27443, loss=0.023785293102264404\n", - "Surface training t=27444, loss=0.02155377622693777\n", - "Surface training t=27445, loss=0.023713963106274605\n", - "Surface training t=27446, loss=0.019811255857348442\n", - "Surface training t=27447, loss=0.01912139868363738\n", - "Surface training t=27448, 
loss=0.021409770473837852\n", - "Surface training t=27449, loss=0.025285822339355946\n", - "Surface training t=27450, loss=0.016642923932522535\n", - "Surface training t=27451, loss=0.019622843712568283\n", - "Surface training t=27452, loss=0.022115147672593594\n", - "Surface training t=27453, loss=0.018512561917304993\n", - "Surface training t=27454, loss=0.01748052053153515\n", - "Surface training t=27455, loss=0.016586003825068474\n", - "Surface training t=27456, loss=0.023657958023250103\n", - "Surface training t=27457, loss=0.018181036226451397\n", - "Surface training t=27458, loss=0.023413430899381638\n", - "Surface training t=27459, loss=0.025644666515290737\n", - "Surface training t=27460, loss=0.03149003908038139\n", - "Surface training t=27461, loss=0.03415648452937603\n", - "Surface training t=27462, loss=0.028202246874570847\n", - "Surface training t=27463, loss=0.03186020255088806\n", - "Surface training t=27464, loss=0.03967735730111599\n", - "Surface training t=27465, loss=0.03148399665951729\n", - "Surface training t=27466, loss=0.028442558832466602\n", - "Surface training t=27467, loss=0.03315340355038643\n", - "Surface training t=27468, loss=0.031116507947444916\n", - "Surface training t=27469, loss=0.027708529494702816\n", - "Surface training t=27470, loss=0.024378793314099312\n", - "Surface training t=27471, loss=0.023650528863072395\n", - "Surface training t=27472, loss=0.020041923969984055\n", - "Surface training t=27473, loss=0.01886661071330309\n", - "Surface training t=27474, loss=0.018708757124841213\n", - "Surface training t=27475, loss=0.01681705843657255\n", - "Surface training t=27476, loss=0.015970204025506973\n", - "Surface training t=27477, loss=0.017453642562031746\n", - "Surface training t=27478, loss=0.016639928799122572\n", - "Surface training t=27479, loss=0.017300275154411793\n", - "Surface training t=27480, loss=0.017789042554795742\n", - "Surface training t=27481, loss=0.01909061335027218\n", - "Surface training t=27482, loss=0.024962020106613636\n", - "Surface training t=27483, loss=0.026021823287010193\n", - "Surface training t=27484, loss=0.02077129576355219\n", - "Surface training t=27485, loss=0.021209019236266613\n", - "Surface training t=27486, loss=0.020011325366795063\n", - "Surface training t=27487, loss=0.01860957033932209\n", - "Surface training t=27488, loss=0.02091360744088888\n", - "Surface training t=27489, loss=0.024840116500854492\n", - "Surface training t=27490, loss=0.026633317582309246\n", - "Surface training t=27491, loss=0.02464572060853243\n", - "Surface training t=27492, loss=0.02485833689570427\n", - "Surface training t=27493, loss=0.03659561835229397\n", - "Surface training t=27494, loss=0.02783789113163948\n", - "Surface training t=27495, loss=0.026542527601122856\n", - "Surface training t=27496, loss=0.022600959055125713\n", - "Surface training t=27497, loss=0.0464529674500227\n", - "Surface training t=27498, loss=0.0341974887996912\n", - "Surface training t=27499, loss=0.03245009761303663\n", - "Surface training t=27500, loss=0.031664587557315826\n", - "Surface training t=27501, loss=0.03128077834844589\n", - "Surface training t=27502, loss=0.03326734434813261\n", - "Surface training t=27503, loss=0.02980763278901577\n", - "Surface training t=27504, loss=0.02598385140299797\n", - "Surface training t=27505, loss=0.023463495075702667\n", - "Surface training t=27506, loss=0.02483850996941328\n", - "Surface training t=27507, loss=0.027637934312224388\n", - "Surface training t=27508, loss=0.03720096871256828\n", - "Surface 
training t=27509, loss=0.030885386280715466\n", - "Surface training t=27510, loss=0.028961360454559326\n", - "Surface training t=27511, loss=0.029011116363108158\n", - "Surface training t=27512, loss=0.03293787222355604\n", - "Surface training t=27513, loss=0.04232562705874443\n", - "Surface training t=27514, loss=0.03225140366703272\n", - "Surface training t=27515, loss=0.0351030332967639\n", - "Surface training t=27516, loss=0.043824756518006325\n", - "Surface training t=27517, loss=0.03847781755030155\n", - "Surface training t=27518, loss=0.043053628876805305\n", - "Surface training t=27519, loss=0.04301728308200836\n", - "Surface training t=27520, loss=0.03651002049446106\n", - "Surface training t=27521, loss=0.054348548874258995\n", - "Surface training t=27522, loss=0.035922364331781864\n", - "Surface training t=27523, loss=0.0344202509149909\n", - "Surface training t=27524, loss=0.04320163652300835\n", - "Surface training t=27525, loss=0.03493589907884598\n", - "Surface training t=27526, loss=0.031055202707648277\n", - "Surface training t=27527, loss=0.037316739559173584\n", - "Surface training t=27528, loss=0.039421225897967815\n", - "Surface training t=27529, loss=0.030514923855662346\n", - "Surface training t=27530, loss=0.030446825549006462\n", - "Surface training t=27531, loss=0.03101487271487713\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=27532, loss=0.033823675476014614\n", - "Surface training t=27533, loss=0.025845607742667198\n", - "Surface training t=27534, loss=0.02556120976805687\n", - "Surface training t=27535, loss=0.021181855350732803\n", - "Surface training t=27536, loss=0.022724458947777748\n", - "Surface training t=27537, loss=0.0272762319073081\n", - "Surface training t=27538, loss=0.02494935132563114\n", - "Surface training t=27539, loss=0.01769753359258175\n", - "Surface training t=27540, loss=0.022037711925804615\n", - "Surface training t=27541, loss=0.025599914602935314\n", - "Surface training t=27542, loss=0.025105783715844154\n", - "Surface training t=27543, loss=0.02205847203731537\n", - "Surface training t=27544, loss=0.01929630059748888\n", - "Surface training t=27545, loss=0.02563906740397215\n", - "Surface training t=27546, loss=0.021886282600462437\n", - "Surface training t=27547, loss=0.014200064353644848\n", - "Surface training t=27548, loss=0.017213867511600256\n", - "Surface training t=27549, loss=0.014055718667805195\n", - "Surface training t=27550, loss=0.01817764900624752\n", - "Surface training t=27551, loss=0.018420962616801262\n", - "Surface training t=27552, loss=0.015113579109311104\n", - "Surface training t=27553, loss=0.01907999347895384\n", - "Surface training t=27554, loss=0.01640621153637767\n", - "Surface training t=27555, loss=0.017783456947654486\n", - "Surface training t=27556, loss=0.02114791516214609\n", - "Surface training t=27557, loss=0.018827810883522034\n", - "Surface training t=27558, loss=0.014946897979825735\n", - "Surface training t=27559, loss=0.014845522586256266\n", - "Surface training t=27560, loss=0.01250487333163619\n", - "Surface training t=27561, loss=0.014103967230767012\n", - "Surface training t=27562, loss=0.016436648555099964\n", - "Surface training t=27563, loss=0.01728043332695961\n", - "Surface training t=27564, loss=0.01574015012010932\n", - "Surface training t=27565, loss=0.01938761491328478\n", - "Surface training t=27566, loss=0.016293358523398638\n", - "Surface training t=27567, loss=0.016342798247933388\n", - "Surface training t=27568, 
loss=0.01566800568252802\n", - "Surface training t=27569, loss=0.01943073607981205\n", - "Surface training t=27570, loss=0.02506289817392826\n", - "Surface training t=27571, loss=0.02413194254040718\n", - "Surface training t=27572, loss=0.020073245279490948\n", - "Surface training t=27573, loss=0.022681422531604767\n", - "Surface training t=27574, loss=0.034473735839128494\n", - "Surface training t=27575, loss=0.022014685906469822\n", - "Surface training t=27576, loss=0.02011918742209673\n", - "Surface training t=27577, loss=0.02965667936950922\n", - "Surface training t=27578, loss=0.019982969388365746\n", - "Surface training t=27579, loss=0.020910647697746754\n", - "Surface training t=27580, loss=0.0198250412940979\n", - "Surface training t=27581, loss=0.01730494387447834\n", - "Surface training t=27582, loss=0.018935075029730797\n", - "Surface training t=27583, loss=0.018761964049190283\n", - "Surface training t=27584, loss=0.023100978694856167\n", - "Surface training t=27585, loss=0.01866268366575241\n", - "Surface training t=27586, loss=0.016381744295358658\n", - "Surface training t=27587, loss=0.022266699001193047\n", - "Surface training t=27588, loss=0.020988398231565952\n", - "Surface training t=27589, loss=0.026298335753381252\n", - "Surface training t=27590, loss=0.022148409858345985\n", - "Surface training t=27591, loss=0.019156454131007195\n", - "Surface training t=27592, loss=0.021106651052832603\n", - "Surface training t=27593, loss=0.021456480957567692\n", - "Surface training t=27594, loss=0.022961697541177273\n", - "Surface training t=27595, loss=0.023631194606423378\n", - "Surface training t=27596, loss=0.028424755670130253\n", - "Surface training t=27597, loss=0.023623822256922722\n", - "Surface training t=27598, loss=0.02102866768836975\n", - "Surface training t=27599, loss=0.021031079813838005\n", - "Surface training t=27600, loss=0.026027205400168896\n", - "Surface training t=27601, loss=0.020000260323286057\n", - "Surface training t=27602, loss=0.027808508835732937\n", - "Surface training t=27603, loss=0.03310745768249035\n", - "Surface training t=27604, loss=0.02504836954176426\n", - "Surface training t=27605, loss=0.026228894479572773\n", - "Surface training t=27606, loss=0.029531802982091904\n", - "Surface training t=27607, loss=0.030450335703790188\n", - "Surface training t=27608, loss=0.037206098437309265\n", - "Surface training t=27609, loss=0.025908704847097397\n", - "Surface training t=27610, loss=0.02871674206107855\n", - "Surface training t=27611, loss=0.03519104793667793\n", - "Surface training t=27612, loss=0.03664008900523186\n", - "Surface training t=27613, loss=0.03178275469690561\n", - "Surface training t=27614, loss=0.036289600655436516\n", - "Surface training t=27615, loss=0.04071152210235596\n", - "Surface training t=27616, loss=0.024769517593085766\n", - "Surface training t=27617, loss=0.027220592834055424\n", - "Surface training t=27618, loss=0.024196342565119267\n", - "Surface training t=27619, loss=0.024138161912560463\n", - "Surface training t=27620, loss=0.02139168232679367\n", - "Surface training t=27621, loss=0.022809339687228203\n", - "Surface training t=27622, loss=0.025728371925652027\n", - "Surface training t=27623, loss=0.027219736017286777\n", - "Surface training t=27624, loss=0.03278158977627754\n", - "Surface training t=27625, loss=0.03836419619619846\n", - "Surface training t=27626, loss=0.030689483508467674\n", - "Surface training t=27627, loss=0.02965718973428011\n", - "Surface training t=27628, loss=0.027692988514900208\n", - 
"Surface training t=27629, loss=0.04197176545858383\n", - "Surface training t=27630, loss=0.028978480957448483\n", - "Surface training t=27631, loss=0.028860002756118774\n", - "Surface training t=27632, loss=0.02871001325547695\n", - "Surface training t=27633, loss=0.03319589979946613\n", - "Surface training t=27634, loss=0.022108733654022217\n", - "Surface training t=27635, loss=0.019987761974334717\n", - "Surface training t=27636, loss=0.02278977632522583\n", - "Surface training t=27637, loss=0.022153626196086407\n", - "Surface training t=27638, loss=0.023700066842138767\n", - "Surface training t=27639, loss=0.020334268920123577\n", - "Surface training t=27640, loss=0.023726667277514935\n", - "Surface training t=27641, loss=0.03066575899720192\n", - "Surface training t=27642, loss=0.029322735033929348\n", - "Surface training t=27643, loss=0.026069742627441883\n", - "Surface training t=27644, loss=0.02846788614988327\n", - "Surface training t=27645, loss=0.036650851368904114\n", - "Surface training t=27646, loss=0.035925730131566525\n", - "Surface training t=27647, loss=0.03102933708578348\n", - "Surface training t=27648, loss=0.03729112446308136\n", - "Surface training t=27649, loss=0.03453810699284077\n", - "Surface training t=27650, loss=0.025815204717218876\n", - "Surface training t=27651, loss=0.033330341801047325\n", - "Surface training t=27652, loss=0.023055095225572586\n", - "Surface training t=27653, loss=0.024371705949306488\n", - "Surface training t=27654, loss=0.027410367503762245\n", - "Surface training t=27655, loss=0.03116979543119669\n", - "Surface training t=27656, loss=0.03198301047086716\n", - "Surface training t=27657, loss=0.028803536668419838\n", - "Surface training t=27658, loss=0.020138714462518692\n", - "Surface training t=27659, loss=0.02656687516719103\n", - "Surface training t=27660, loss=0.020704824943095446\n", - "Surface training t=27661, loss=0.02166650164872408\n", - "Surface training t=27662, loss=0.020406333729624748\n", - "Surface training t=27663, loss=0.01719912327826023\n", - "Surface training t=27664, loss=0.021952235139906406\n", - "Surface training t=27665, loss=0.018605020828545094\n", - "Surface training t=27666, loss=0.016489296220242977\n", - "Surface training t=27667, loss=0.013884998857975006\n", - "Surface training t=27668, loss=0.022592135705053806\n", - "Surface training t=27669, loss=0.016752548050135374\n", - "Surface training t=27670, loss=0.01654177764430642\n", - "Surface training t=27671, loss=0.018464921042323112\n", - "Surface training t=27672, loss=0.01688528200611472\n", - "Surface training t=27673, loss=0.015693020075559616\n", - "Surface training t=27674, loss=0.01931553054600954\n", - "Surface training t=27675, loss=0.020270667038857937\n", - "Surface training t=27676, loss=0.020072161220014095\n", - "Surface training t=27677, loss=0.020112419500947\n", - "Surface training t=27678, loss=0.018245533108711243\n", - "Surface training t=27679, loss=0.02290021162480116\n", - "Surface training t=27680, loss=0.02305900678038597\n", - "Surface training t=27681, loss=0.025617968291044235\n", - "Surface training t=27682, loss=0.036051319912075996\n", - "Surface training t=27683, loss=0.02457005623728037\n", - "Surface training t=27684, loss=0.029672074131667614\n", - "Surface training t=27685, loss=0.043041354045271873\n", - "Surface training t=27686, loss=0.0258781174197793\n", - "Surface training t=27687, loss=0.04686926677823067\n", - "Surface training t=27688, loss=0.034935591742396355\n", - "Surface training t=27689, 
loss=0.03867172822356224\n", - "Surface training t=27690, loss=0.03415774833410978\n", - "Surface training t=27691, loss=0.0519380122423172\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=27692, loss=0.03967718593776226\n", - "Surface training t=27693, loss=0.05771147459745407\n", - "Surface training t=27694, loss=0.038613857701420784\n", - "Surface training t=27695, loss=0.05227992683649063\n", - "Surface training t=27696, loss=0.04634195379912853\n", - "Surface training t=27697, loss=0.036512838676571846\n", - "Surface training t=27698, loss=0.039096809923648834\n", - "Surface training t=27699, loss=0.034955878742039204\n", - "Surface training t=27700, loss=0.03890870697796345\n", - "Surface training t=27701, loss=0.032815489917993546\n", - "Surface training t=27702, loss=0.0301731638610363\n", - "Surface training t=27703, loss=0.03554338030517101\n", - "Surface training t=27704, loss=0.022958800196647644\n", - "Surface training t=27705, loss=0.026659897528588772\n", - "Surface training t=27706, loss=0.025862937793135643\n", - "Surface training t=27707, loss=0.01953030563890934\n", - "Surface training t=27708, loss=0.024419800378382206\n", - "Surface training t=27709, loss=0.0238149706274271\n", - "Surface training t=27710, loss=0.02517937868833542\n", - "Surface training t=27711, loss=0.02467372454702854\n", - "Surface training t=27712, loss=0.022227453999221325\n", - "Surface training t=27713, loss=0.023088273592293262\n", - "Surface training t=27714, loss=0.024269741028547287\n", - "Surface training t=27715, loss=0.025420943275094032\n", - "Surface training t=27716, loss=0.018930944614112377\n", - "Surface training t=27717, loss=0.024259960278868675\n", - "Surface training t=27718, loss=0.021916531957685947\n", - "Surface training t=27719, loss=0.021238491870462894\n", - "Surface training t=27720, loss=0.026083238422870636\n", - "Surface training t=27721, loss=0.0173262981697917\n", - "Surface training t=27722, loss=0.02028175164014101\n", - "Surface training t=27723, loss=0.015611376147717237\n", - "Surface training t=27724, loss=0.016986634116619825\n", - "Surface training t=27725, loss=0.018363346345722675\n", - "Surface training t=27726, loss=0.019854577258229256\n", - "Surface training t=27727, loss=0.015876919962465763\n", - "Surface training t=27728, loss=0.01831453340128064\n", - "Surface training t=27729, loss=0.017765552271157503\n", - "Surface training t=27730, loss=0.02132310252636671\n", - "Surface training t=27731, loss=0.019529791548848152\n", - "Surface training t=27732, loss=0.018379258923232555\n", - "Surface training t=27733, loss=0.01881435513496399\n", - "Surface training t=27734, loss=0.017917927354574203\n", - "Surface training t=27735, loss=0.020649433135986328\n", - "Surface training t=27736, loss=0.017176369205117226\n", - "Surface training t=27737, loss=0.02745706867426634\n", - "Surface training t=27738, loss=0.03550564870238304\n", - "Surface training t=27739, loss=0.02766087558120489\n", - "Surface training t=27740, loss=0.025548603385686874\n", - "Surface training t=27741, loss=0.02634145226329565\n", - "Surface training t=27742, loss=0.030413244850933552\n", - "Surface training t=27743, loss=0.022587095387279987\n", - "Surface training t=27744, loss=0.016776185482740402\n", - "Surface training t=27745, loss=0.018879457842558622\n", - "Surface training t=27746, loss=0.021272472105920315\n", - "Surface training t=27747, loss=0.02584797516465187\n", - "Surface training t=27748, loss=0.02585239615291357\n", - 
"Surface training t=27749, loss=0.02290162444114685\n", - "Surface training t=27750, loss=0.01696810405701399\n", - "Surface training t=27751, loss=0.02141878567636013\n", - "Surface training t=27752, loss=0.019860584288835526\n", - "Surface training t=27753, loss=0.020084958523511887\n", - "Surface training t=27754, loss=0.01819978840649128\n", - "Surface training t=27755, loss=0.01772918738424778\n", - "Surface training t=27756, loss=0.015797445084899664\n", - "Surface training t=27757, loss=0.016391413286328316\n", - "Surface training t=27758, loss=0.015203487128019333\n", - "Surface training t=27759, loss=0.012730329297482967\n", - "Surface training t=27760, loss=0.016211941838264465\n", - "Surface training t=27761, loss=0.01586012588813901\n", - "Surface training t=27762, loss=0.011451228987425566\n", - "Surface training t=27763, loss=0.012670733965933323\n", - "Surface training t=27764, loss=0.012006219942122698\n", - "Surface training t=27765, loss=0.013614431023597717\n", - "Surface training t=27766, loss=0.01922037359327078\n", - "Surface training t=27767, loss=0.019605416804552078\n", - "Surface training t=27768, loss=0.02229171898216009\n", - "Surface training t=27769, loss=0.018930611200630665\n", - "Surface training t=27770, loss=0.021808608435094357\n", - "Surface training t=27771, loss=0.019729805178940296\n", - "Surface training t=27772, loss=0.020530777983367443\n", - "Surface training t=27773, loss=0.022723968140780926\n", - "Surface training t=27774, loss=0.019933415576815605\n", - "Surface training t=27775, loss=0.01565652433782816\n", - "Surface training t=27776, loss=0.022424189373850822\n", - "Surface training t=27777, loss=0.01942208968102932\n", - "Surface training t=27778, loss=0.015225164126604795\n", - "Surface training t=27779, loss=0.018882863223552704\n", - "Surface training t=27780, loss=0.013297637924551964\n", - "Surface training t=27781, loss=0.011899566277861595\n", - "Surface training t=27782, loss=0.017691995948553085\n", - "Surface training t=27783, loss=0.017862148582935333\n", - "Surface training t=27784, loss=0.018384930677711964\n", - "Surface training t=27785, loss=0.01380149181932211\n", - "Surface training t=27786, loss=0.016068127937614918\n", - "Surface training t=27787, loss=0.019159282092005014\n", - "Surface training t=27788, loss=0.02095959521830082\n", - "Surface training t=27789, loss=0.022713077254593372\n", - "Surface training t=27790, loss=0.01899679657071829\n", - "Surface training t=27791, loss=0.02094663865864277\n", - "Surface training t=27792, loss=0.023240447975695133\n", - "Surface training t=27793, loss=0.02262937743216753\n", - "Surface training t=27794, loss=0.02049626223742962\n", - "Surface training t=27795, loss=0.0245577497407794\n", - "Surface training t=27796, loss=0.030881470069289207\n", - "Surface training t=27797, loss=0.022994245402514935\n", - "Surface training t=27798, loss=0.01737096207216382\n", - "Surface training t=27799, loss=0.014996539801359177\n", - "Surface training t=27800, loss=0.015538581181317568\n", - "Surface training t=27801, loss=0.01868976652622223\n", - "Surface training t=27802, loss=0.01757709216326475\n", - "Surface training t=27803, loss=0.0186653733253479\n", - "Surface training t=27804, loss=0.016629527788609266\n", - "Surface training t=27805, loss=0.017661916092038155\n", - "Surface training t=27806, loss=0.01923803798854351\n", - "Surface training t=27807, loss=0.01800133939832449\n", - "Surface training t=27808, loss=0.020657941699028015\n", - "Surface training t=27809, 
loss=0.02278857585042715\n", - "Surface training t=27810, loss=0.01802858244627714\n", - "Surface training t=27811, loss=0.018208280205726624\n", - "Surface training t=27812, loss=0.023249113000929356\n", - "Surface training t=27813, loss=0.026061560958623886\n", - "Surface training t=27814, loss=0.0309145487844944\n", - "Surface training t=27815, loss=0.029599986970424652\n", - "Surface training t=27816, loss=0.037378303706645966\n", - "Surface training t=27817, loss=0.029855075292289257\n", - "Surface training t=27818, loss=0.028797468170523643\n", - "Surface training t=27819, loss=0.02543602790683508\n", - "Surface training t=27820, loss=0.02611719351261854\n", - "Surface training t=27821, loss=0.032617468386888504\n", - "Surface training t=27822, loss=0.04049724526703358\n", - "Surface training t=27823, loss=0.032838018611073494\n", - "Surface training t=27824, loss=0.026548249647021294\n", - "Surface training t=27825, loss=0.02524299919605255\n", - "Surface training t=27826, loss=0.019839409738779068\n", - "Surface training t=27827, loss=0.02158559113740921\n", - "Surface training t=27828, loss=0.018674843478947878\n", - "Surface training t=27829, loss=0.018878964707255363\n", - "Surface training t=27830, loss=0.020347709767520428\n", - "Surface training t=27831, loss=0.022860906086862087\n", - "Surface training t=27832, loss=0.02234868612140417\n", - "Surface training t=27833, loss=0.02193572837859392\n", - "Surface training t=27834, loss=0.023288926109671593\n", - "Surface training t=27835, loss=0.01957537606358528\n", - "Surface training t=27836, loss=0.013686774298548698\n", - "Surface training t=27837, loss=0.014135031495243311\n", - "Surface training t=27838, loss=0.02292358409613371\n", - "Surface training t=27839, loss=0.016993090510368347\n", - "Surface training t=27840, loss=0.017466534860432148\n", - "Surface training t=27841, loss=0.023673677816987038\n", - "Surface training t=27842, loss=0.027330839075148106\n", - "Surface training t=27843, loss=0.02736519742757082\n", - "Surface training t=27844, loss=0.03427213430404663\n", - "Surface training t=27845, loss=0.023047962225973606\n", - "Surface training t=27846, loss=0.027602079324424267\n", - "Surface training t=27847, loss=0.027719199657440186\n", - "Surface training t=27848, loss=0.02523938100785017\n", - "Surface training t=27849, loss=0.01903735101222992\n", - "Surface training t=27850, loss=0.02011894714087248\n", - "Surface training t=27851, loss=0.0182129954919219\n", - "Surface training t=27852, loss=0.01984396670013666\n", - "Surface training t=27853, loss=0.02153566386550665\n", - "Surface training t=27854, loss=0.021983183920383453\n", - "Surface training t=27855, loss=0.024035131558775902\n", - "Surface training t=27856, loss=0.03576613962650299\n", - "Surface training t=27857, loss=0.02572616096585989\n", - "Surface training t=27858, loss=0.024806346744298935\n", - "Surface training t=27859, loss=0.024001714773476124\n", - "Surface training t=27860, loss=0.02614598535001278\n", - "Surface training t=27861, loss=0.029970331117510796\n", - "Surface training t=27862, loss=0.024565255269408226\n", - "Surface training t=27863, loss=0.024011804722249508\n", - "Surface training t=27864, loss=0.029707045294344425\n", - "Surface training t=27865, loss=0.02130914945155382\n", - "Surface training t=27866, loss=0.030886315740644932\n", - "Surface training t=27867, loss=0.029231992550194263\n", - "Surface training t=27868, loss=0.02021510200574994\n", - "Surface training t=27869, loss=0.02578588481992483\n", - "Surface 
training t=27870, loss=0.02700077835470438\n", - "Surface training t=27871, loss=0.019786246120929718\n", - "Surface training t=27872, loss=0.0206136591732502\n", - "Surface training t=27873, loss=0.026339896023273468\n", - "Surface training t=27874, loss=0.022096898406744003\n", - "Surface training t=27875, loss=0.020751692354679108\n", - "Surface training t=27876, loss=0.02635880745947361\n", - "Surface training t=27877, loss=0.019852428697049618\n", - "Surface training t=27878, loss=0.020567418076097965\n", - "Surface training t=27879, loss=0.021975496783852577\n", - "Surface training t=27880, loss=0.02636505290865898\n", - "Surface training t=27881, loss=0.03130409959703684\n", - "Surface training t=27882, loss=0.023068467155098915\n", - "Surface training t=27883, loss=0.029785198159515858\n", - "Surface training t=27884, loss=0.025547053664922714\n", - "Surface training t=27885, loss=0.028697548434138298\n", - "Surface training t=27886, loss=0.02800230961292982\n", - "Surface training t=27887, loss=0.02995684463530779\n", - "Surface training t=27888, loss=0.02924669999629259\n", - "Surface training t=27889, loss=0.022009071428328753\n", - "Surface training t=27890, loss=0.026006869971752167\n", - "Surface training t=27891, loss=0.027753964066505432\n", - "Surface training t=27892, loss=0.02979167178273201\n", - "Surface training t=27893, loss=0.023088298738002777\n", - "Surface training t=27894, loss=0.028509536758065224\n", - "Surface training t=27895, loss=0.02254902385175228\n", - "Surface training t=27896, loss=0.02179639134556055\n", - "Surface training t=27897, loss=0.01934468001127243\n", - "Surface training t=27898, loss=0.01681079575791955\n", - "Surface training t=27899, loss=0.020569047890603542\n", - "Surface training t=27900, loss=0.01470192614942789\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=27901, loss=0.021372588351368904\n", - "Surface training t=27902, loss=0.021837199572473764\n", - "Surface training t=27903, loss=0.02075171936303377\n", - "Surface training t=27904, loss=0.02700035460293293\n", - "Surface training t=27905, loss=0.01880104187875986\n", - "Surface training t=27906, loss=0.022816898301243782\n", - "Surface training t=27907, loss=0.018662788905203342\n", - "Surface training t=27908, loss=0.024421505630016327\n", - "Surface training t=27909, loss=0.02235241886228323\n", - "Surface training t=27910, loss=0.024072719737887383\n", - "Surface training t=27911, loss=0.03099672496318817\n", - "Surface training t=27912, loss=0.023998428136110306\n", - "Surface training t=27913, loss=0.03409311827272177\n", - "Surface training t=27914, loss=0.02389940246939659\n", - "Surface training t=27915, loss=0.022460326552391052\n", - "Surface training t=27916, loss=0.015497073531150818\n", - "Surface training t=27917, loss=0.02567758411169052\n", - "Surface training t=27918, loss=0.02816982287913561\n", - "Surface training t=27919, loss=0.028643546625971794\n", - "Surface training t=27920, loss=0.028104216791689396\n", - "Surface training t=27921, loss=0.02242510300129652\n", - "Surface training t=27922, loss=0.026224260218441486\n", - "Surface training t=27923, loss=0.028800517320632935\n", - "Surface training t=27924, loss=0.0309563297778368\n", - "Surface training t=27925, loss=0.03823411837220192\n", - "Surface training t=27926, loss=0.027575738728046417\n", - "Surface training t=27927, loss=0.030506417155265808\n", - "Surface training t=27928, loss=0.020754418335855007\n", - "Surface training t=27929, 
loss=0.023714538663625717\n", - "Surface training t=27930, loss=0.022410983219742775\n", - "Surface training t=27931, loss=0.02062212210148573\n", - "Surface training t=27932, loss=0.024884073995053768\n", - "Surface training t=27933, loss=0.018782068975269794\n", - "Surface training t=27934, loss=0.025331120938062668\n", - "Surface training t=27935, loss=0.023727376013994217\n", - "Surface training t=27936, loss=0.024141091853380203\n", - "Surface training t=27937, loss=0.024910394102334976\n", - "Surface training t=27938, loss=0.022720681503415108\n", - "Surface training t=27939, loss=0.018510231748223305\n", - "Surface training t=27940, loss=0.02481363620609045\n", - "Surface training t=27941, loss=0.031308574602007866\n", - "Surface training t=27942, loss=0.023144565522670746\n", - "Surface training t=27943, loss=0.02558143436908722\n", - "Surface training t=27944, loss=0.02433053497225046\n", - "Surface training t=27945, loss=0.02457443531602621\n", - "Surface training t=27946, loss=0.023628666065633297\n", - "Surface training t=27947, loss=0.02032705768942833\n", - "Surface training t=27948, loss=0.02381414081901312\n", - "Surface training t=27949, loss=0.022939239628612995\n", - "Surface training t=27950, loss=0.02276193629950285\n", - "Surface training t=27951, loss=0.024547766894102097\n", - "Surface training t=27952, loss=0.016321507282555103\n", - "Surface training t=27953, loss=0.021261987276375294\n", - "Surface training t=27954, loss=0.022769734263420105\n", - "Surface training t=27955, loss=0.01672629965469241\n", - "Surface training t=27956, loss=0.017359154298901558\n", - "Surface training t=27957, loss=0.016236302442848682\n", - "Surface training t=27958, loss=0.015082235913723707\n", - "Surface training t=27959, loss=0.018910993821918964\n", - "Surface training t=27960, loss=0.016840368509292603\n", - "Surface training t=27961, loss=0.019546015188097954\n", - "Surface training t=27962, loss=0.018057281151413918\n", - "Surface training t=27963, loss=0.019603434018790722\n", - "Surface training t=27964, loss=0.02013322990387678\n", - "Surface training t=27965, loss=0.01885530725121498\n", - "Surface training t=27966, loss=0.01576521061360836\n", - "Surface training t=27967, loss=0.018640348687767982\n", - "Surface training t=27968, loss=0.02077093906700611\n", - "Surface training t=27969, loss=0.016065950505435467\n", - "Surface training t=27970, loss=0.01891932962462306\n", - "Surface training t=27971, loss=0.02038305252790451\n", - "Surface training t=27972, loss=0.02202140912413597\n", - "Surface training t=27973, loss=0.023661119863390923\n", - "Surface training t=27974, loss=0.01841630879789591\n", - "Surface training t=27975, loss=0.015567161608487368\n", - "Surface training t=27976, loss=0.017491887789219618\n", - "Surface training t=27977, loss=0.018183264415711164\n", - "Surface training t=27978, loss=0.01736588589847088\n", - "Surface training t=27979, loss=0.024119596928358078\n", - "Surface training t=27980, loss=0.025150740519165993\n", - "Surface training t=27981, loss=0.02589964959770441\n", - "Surface training t=27982, loss=0.016735355369746685\n", - "Surface training t=27983, loss=0.01874187681823969\n", - "Surface training t=27984, loss=0.02102035004645586\n", - "Surface training t=27985, loss=0.018520635552704334\n", - "Surface training t=27986, loss=0.018941730260849\n", - "Surface training t=27987, loss=0.018794888630509377\n", - "Surface training t=27988, loss=0.015439957845956087\n", - "Surface training t=27989, loss=0.01726646814495325\n", - 
"Surface training t=27990, loss=0.020062658935785294\n", - "Surface training t=27991, loss=0.02372359298169613\n", - "Surface training t=27992, loss=0.023202046751976013\n", - "Surface training t=27993, loss=0.018617709167301655\n", - "Surface training t=27994, loss=0.015895187854766846\n", - "Surface training t=27995, loss=0.03239543177187443\n", - "Surface training t=27996, loss=0.022249859757721424\n", - "Surface training t=27997, loss=0.03310737106949091\n", - "Surface training t=27998, loss=0.02785121463239193\n", - "Surface training t=27999, loss=0.021570284850895405\n", - "Surface training t=28000, loss=0.0265297656878829\n", - "Surface training t=28001, loss=0.02042776346206665\n", - "Surface training t=28002, loss=0.016167878173291683\n", - "Surface training t=28003, loss=0.023497212678194046\n", - "Surface training t=28004, loss=0.01564285345375538\n", - "Surface training t=28005, loss=0.016470883507281542\n", - "Surface training t=28006, loss=0.01830616220831871\n", - "Surface training t=28007, loss=0.019521400332450867\n", - "Surface training t=28008, loss=0.021055053919553757\n", - "Surface training t=28009, loss=0.01618221588432789\n", - "Surface training t=28010, loss=0.018612460233271122\n", - "Surface training t=28011, loss=0.01718649361282587\n", - "Surface training t=28012, loss=0.01819052780047059\n", - "Surface training t=28013, loss=0.022571129724383354\n", - "Surface training t=28014, loss=0.0196685753762722\n", - "Surface training t=28015, loss=0.016814561560750008\n", - "Surface training t=28016, loss=0.018147965893149376\n", - "Surface training t=28017, loss=0.01622430980205536\n", - "Surface training t=28018, loss=0.01776453945785761\n", - "Surface training t=28019, loss=0.01376212714239955\n", - "Surface training t=28020, loss=0.01883571967482567\n", - "Surface training t=28021, loss=0.01357000321149826\n", - "Surface training t=28022, loss=0.017126412130892277\n", - "Surface training t=28023, loss=0.013385205529630184\n", - "Surface training t=28024, loss=0.01585989212617278\n", - "Surface training t=28025, loss=0.015552324242889881\n", - "Surface training t=28026, loss=0.014473853167146444\n", - "Surface training t=28027, loss=0.018577589187771082\n", - "Surface training t=28028, loss=0.024148722179234028\n", - "Surface training t=28029, loss=0.020700505003333092\n", - "Surface training t=28030, loss=0.02886057458817959\n", - "Surface training t=28031, loss=0.023215366527438164\n", - "Surface training t=28032, loss=0.026268478482961655\n", - "Surface training t=28033, loss=0.022147140465676785\n", - "Surface training t=28034, loss=0.020969060249626637\n", - "Surface training t=28035, loss=0.020190025214105844\n", - "Surface training t=28036, loss=0.01844798680394888\n", - "Surface training t=28037, loss=0.017175418324768543\n", - "Surface training t=28038, loss=0.01609402010217309\n", - "Surface training t=28039, loss=0.01609998708590865\n", - "Surface training t=28040, loss=0.017733708955347538\n", - "Surface training t=28041, loss=0.018564574420452118\n", - "Surface training t=28042, loss=0.020420389249920845\n", - "Surface training t=28043, loss=0.02127788122743368\n", - "Surface training t=28044, loss=0.022689444944262505\n", - "Surface training t=28045, loss=0.027643934823572636\n", - "Surface training t=28046, loss=0.01634242618456483\n", - "Surface training t=28047, loss=0.014944156631827354\n", - "Surface training t=28048, loss=0.01915309438481927\n", - "Surface training t=28049, loss=0.01920431014150381\n", - "Surface training t=28050, 
loss=0.022452587261795998\n", - "Surface training t=28051, loss=0.021896890364587307\n", - "Surface training t=28052, loss=0.021823612973093987\n", - "Surface training t=28053, loss=0.025489318184554577\n", - "Surface training t=28054, loss=0.025960519909858704\n", - "Surface training t=28055, loss=0.021306712180376053\n", - "Surface training t=28056, loss=0.02601318061351776\n", - "Surface training t=28057, loss=0.02761092595756054\n", - "Surface training t=28058, loss=0.029860854148864746\n", - "Surface training t=28059, loss=0.01957038789987564\n", - "Surface training t=28060, loss=0.014952694531530142\n", - "Surface training t=28061, loss=0.030159653164446354\n", - "Surface training t=28062, loss=0.01767493039369583\n", - "Surface training t=28063, loss=0.03217852860689163\n", - "Surface training t=28064, loss=0.02621220238506794\n", - "Surface training t=28065, loss=0.022872203961014748\n", - "Surface training t=28066, loss=0.026969491504132748\n", - "Surface training t=28067, loss=0.024647634476423264\n", - "Surface training t=28068, loss=0.028709392063319683\n", - "Surface training t=28069, loss=0.022466945461928844\n", - "Surface training t=28070, loss=0.018365848809480667\n", - "Surface training t=28071, loss=0.018521395977586508\n", - "Surface training t=28072, loss=0.024968464858829975\n", - "Surface training t=28073, loss=0.025260137394070625\n", - "Surface training t=28074, loss=0.017263724468648434\n", - "Surface training t=28075, loss=0.03004630282521248\n", - "Surface training t=28076, loss=0.0236955713480711\n", - "Surface training t=28077, loss=0.026025516912341118\n", - "Surface training t=28078, loss=0.032169096171855927\n", - "Surface training t=28079, loss=0.026851993054151535\n", - "Surface training t=28080, loss=0.023049998097121716\n", - "Surface training t=28081, loss=0.025508644059300423\n", - "Surface training t=28082, loss=0.018811258487403393\n", - "Surface training t=28083, loss=0.016634239815175533\n", - "Surface training t=28084, loss=0.01707124477252364\n", - "Surface training t=28085, loss=0.02126905508339405\n", - "Surface training t=28086, loss=0.01512190792709589\n", - "Surface training t=28087, loss=0.020063786767423153\n", - "Surface training t=28088, loss=0.013716401997953653\n", - "Surface training t=28089, loss=0.014413375873118639\n", - "Surface training t=28090, loss=0.013938922435045242\n", - "Surface training t=28091, loss=0.01256435178220272\n", - "Surface training t=28092, loss=0.016947058960795403\n", - "Surface training t=28093, loss=0.02118176966905594\n", - "Surface training t=28094, loss=0.021239078603684902\n", - "Surface training t=28095, loss=0.02429702877998352\n", - "Surface training t=28096, loss=0.023042109794914722\n", - "Surface training t=28097, loss=0.018230533227324486\n", - "Surface training t=28098, loss=0.018979689106345177\n", - "Surface training t=28099, loss=0.01908909808844328\n", - "Surface training t=28100, loss=0.020316521637141705\n", - "Surface training t=28101, loss=0.024352609179913998\n", - "Surface training t=28102, loss=0.01852116920053959\n", - "Surface training t=28103, loss=0.024838997051119804\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=28104, loss=0.027995833195745945\n", - "Surface training t=28105, loss=0.0197359761223197\n", - "Surface training t=28106, loss=0.021594191901385784\n", - "Surface training t=28107, loss=0.03271692059934139\n", - "Surface training t=28108, loss=0.025151751935482025\n", - "Surface training t=28109, 
loss=0.02992895618081093\n", - "Surface training t=28110, loss=0.028547008521854877\n", - "Surface training t=28111, loss=0.02806591149419546\n", - "Surface training t=28112, loss=0.025960834696888924\n", - "Surface training t=28113, loss=0.0170338349416852\n", - "Surface training t=28114, loss=0.020370975136756897\n", - "Surface training t=28115, loss=0.023282003588974476\n", - "Surface training t=28116, loss=0.023841272108256817\n", - "Surface training t=28117, loss=0.02364002913236618\n", - "Surface training t=28118, loss=0.018891059793531895\n", - "Surface training t=28119, loss=0.023810015991330147\n", - "Surface training t=28120, loss=0.023730644024908543\n", - "Surface training t=28121, loss=0.024589729495346546\n", - "Surface training t=28122, loss=0.020814866758883\n", - "Surface training t=28123, loss=0.017050032503902912\n", - "Surface training t=28124, loss=0.02341173868626356\n", - "Surface training t=28125, loss=0.01774443220347166\n", - "Surface training t=28126, loss=0.020867803134024143\n", - "Surface training t=28127, loss=0.023996224626898766\n", - "Surface training t=28128, loss=0.020503845997154713\n", - "Surface training t=28129, loss=0.02252245880663395\n", - "Surface training t=28130, loss=0.024879745207726955\n", - "Surface training t=28131, loss=0.01876120176166296\n", - "Surface training t=28132, loss=0.021689457818865776\n", - "Surface training t=28133, loss=0.017327788285911083\n", - "Surface training t=28134, loss=0.02245241217315197\n", - "Surface training t=28135, loss=0.020535052753984928\n", - "Surface training t=28136, loss=0.024670463986694813\n", - "Surface training t=28137, loss=0.023853576742112637\n", - "Surface training t=28138, loss=0.02535056695342064\n", - "Surface training t=28139, loss=0.02636262122541666\n", - "Surface training t=28140, loss=0.020669869147241116\n", - "Surface training t=28141, loss=0.020775304175913334\n", - "Surface training t=28142, loss=0.02051382791250944\n", - "Surface training t=28143, loss=0.019117919728159904\n", - "Surface training t=28144, loss=0.02065801341086626\n", - "Surface training t=28145, loss=0.023633331060409546\n", - "Surface training t=28146, loss=0.018086097203195095\n", - "Surface training t=28147, loss=0.022616485133767128\n", - "Surface training t=28148, loss=0.023486946243792772\n", - "Surface training t=28149, loss=0.02288295328617096\n", - "Surface training t=28150, loss=0.027353052981197834\n", - "Surface training t=28151, loss=0.02375134266912937\n", - "Surface training t=28152, loss=0.02272589225322008\n", - "Surface training t=28153, loss=0.02574241068214178\n", - "Surface training t=28154, loss=0.026494179852306843\n", - "Surface training t=28155, loss=0.0237100999802351\n", - "Surface training t=28156, loss=0.037691861391067505\n", - "Surface training t=28157, loss=0.03156551253050566\n", - "Surface training t=28158, loss=0.02690695319324732\n", - "Surface training t=28159, loss=0.027132312767207623\n", - "Surface training t=28160, loss=0.024567341431975365\n", - "Surface training t=28161, loss=0.02487615030258894\n", - "Surface training t=28162, loss=0.022380066569894552\n", - "Surface training t=28163, loss=0.024743408896028996\n", - "Surface training t=28164, loss=0.02945595234632492\n", - "Surface training t=28165, loss=0.02939656563103199\n", - "Surface training t=28166, loss=0.02444621082395315\n", - "Surface training t=28167, loss=0.02688341774046421\n", - "Surface training t=28168, loss=0.02947745006531477\n", - "Surface training t=28169, loss=0.019859887193888426\n", - "Surface 
training t=28170, loss=0.018018831498920918\n", - "Surface training t=28171, loss=0.018415392376482487\n", - "Surface training t=28172, loss=0.017179626505821943\n", - "Surface training t=28173, loss=0.016640642657876015\n", - "Surface training t=28174, loss=0.021891221404075623\n", - "Surface training t=28175, loss=0.023913436569273472\n", - "Surface training t=28176, loss=0.027102832682430744\n", - "Surface training t=28177, loss=0.019629701040685177\n", - "Surface training t=28178, loss=0.019295452162623405\n", - "Surface training t=28179, loss=0.017922330647706985\n", - "Surface training t=28180, loss=0.022546750493347645\n", - "Surface training t=28181, loss=0.020599855110049248\n", - "Surface training t=28182, loss=0.01804165542125702\n", - "Surface training t=28183, loss=0.016503070946782827\n", - "Surface training t=28184, loss=0.017115900292992592\n", - "Surface training t=28185, loss=0.016290881671011448\n", - "Surface training t=28186, loss=0.017270312178879976\n", - "Surface training t=28187, loss=0.015757259912788868\n", - "Surface training t=28188, loss=0.019739937037229538\n", - "Surface training t=28189, loss=0.020511317066848278\n", - "Surface training t=28190, loss=0.020907907746732235\n", - "Surface training t=28191, loss=0.02925360295921564\n", - "Surface training t=28192, loss=0.018241723999381065\n", - "Surface training t=28193, loss=0.024071697145700455\n", - "Surface training t=28194, loss=0.021145280450582504\n", - "Surface training t=28195, loss=0.022580300457775593\n", - "Surface training t=28196, loss=0.02550849039107561\n", - "Surface training t=28197, loss=0.023904599249362946\n", - "Surface training t=28198, loss=0.032997941598296165\n", - "Surface training t=28199, loss=0.023662755265831947\n", - "Surface training t=28200, loss=0.026420666836202145\n", - "Surface training t=28201, loss=0.02640717849135399\n", - "Surface training t=28202, loss=0.023916181176900864\n", - "Surface training t=28203, loss=0.02639122772961855\n", - "Surface training t=28204, loss=0.03007841855287552\n", - "Surface training t=28205, loss=0.02122162375599146\n", - "Surface training t=28206, loss=0.02246780227869749\n", - "Surface training t=28207, loss=0.019811205565929413\n", - "Surface training t=28208, loss=0.030322558246552944\n", - "Surface training t=28209, loss=0.027979369275271893\n", - "Surface training t=28210, loss=0.03781324811279774\n", - "Surface training t=28211, loss=0.03417033702135086\n", - "Surface training t=28212, loss=0.0296733183786273\n", - "Surface training t=28213, loss=0.026142138056457043\n", - "Surface training t=28214, loss=0.02516242116689682\n", - "Surface training t=28215, loss=0.02740784827619791\n", - "Surface training t=28216, loss=0.024917916394770145\n", - "Surface training t=28217, loss=0.02158785704523325\n", - "Surface training t=28218, loss=0.023965032771229744\n", - "Surface training t=28219, loss=0.022669944912195206\n", - "Surface training t=28220, loss=0.03313405439257622\n", - "Surface training t=28221, loss=0.02428832557052374\n", - "Surface training t=28222, loss=0.024595143273472786\n", - "Surface training t=28223, loss=0.027442898601293564\n", - "Surface training t=28224, loss=0.02324995305389166\n", - "Surface training t=28225, loss=0.02271004021167755\n", - "Surface training t=28226, loss=0.02275152876973152\n", - "Surface training t=28227, loss=0.025943662971258163\n", - "Surface training t=28228, loss=0.022821866907179356\n", - "Surface training t=28229, loss=0.017311913892626762\n", - "Surface training t=28230, 
loss=0.016861540731042624\n", - "Surface training t=28231, loss=0.018593238666653633\n", - "Surface training t=28232, loss=0.01563074579462409\n", - "Surface training t=28233, loss=0.017083076760172844\n", - "Surface training t=28234, loss=0.015301642008125782\n", - "Surface training t=28235, loss=0.01551839616149664\n", - "Surface training t=28236, loss=0.017418865114450455\n", - "Surface training t=28237, loss=0.013620796613395214\n", - "Surface training t=28238, loss=0.018174374476075172\n", - "Surface training t=28239, loss=0.021613141521811485\n", - "Surface training t=28240, loss=0.02847928460687399\n", - "Surface training t=28241, loss=0.021394817624241114\n", - "Surface training t=28242, loss=0.020798776298761368\n", - "Surface training t=28243, loss=0.021884772926568985\n", - "Surface training t=28244, loss=0.0141027239151299\n", - "Surface training t=28245, loss=0.018639614339917898\n", - "Surface training t=28246, loss=0.018356505781412125\n", - "Surface training t=28247, loss=0.019615270663052797\n", - "Surface training t=28248, loss=0.01795291854068637\n", - "Surface training t=28249, loss=0.021584040485322475\n", - "Surface training t=28250, loss=0.018486863002181053\n", - "Surface training t=28251, loss=0.029080609790980816\n", - "Surface training t=28252, loss=0.029224743135273457\n", - "Surface training t=28253, loss=0.020623603835701942\n", - "Surface training t=28254, loss=0.023917404003441334\n", - "Surface training t=28255, loss=0.02775616943836212\n", - "Surface training t=28256, loss=0.02354886755347252\n", - "Surface training t=28257, loss=0.019818244501948357\n", - "Surface training t=28258, loss=0.01770879700779915\n", - "Surface training t=28259, loss=0.016252863686531782\n", - "Surface training t=28260, loss=0.02396919671446085\n", - "Surface training t=28261, loss=0.02341694012284279\n", - "Surface training t=28262, loss=0.022892745211720467\n", - "Surface training t=28263, loss=0.02322474494576454\n", - "Surface training t=28264, loss=0.023904613219201565\n", - "Surface training t=28265, loss=0.028092974796891212\n", - "Surface training t=28266, loss=0.034445302560925484\n", - "Surface training t=28267, loss=0.031526118516922\n", - "Surface training t=28268, loss=0.03795139957219362\n", - "Surface training t=28269, loss=0.04704124853014946\n", - "Surface training t=28270, loss=0.03564880043268204\n", - "Surface training t=28271, loss=0.028221523389220238\n", - "Surface training t=28272, loss=0.035746727138757706\n", - "Surface training t=28273, loss=0.028167356736958027\n", - "Surface training t=28274, loss=0.029134382493793964\n", - "Surface training t=28275, loss=0.02508711814880371\n", - "Surface training t=28276, loss=0.020355047658085823\n", - "Surface training t=28277, loss=0.024309348315000534\n", - "Surface training t=28278, loss=0.021073988638818264\n", - "Surface training t=28279, loss=0.02119533997029066\n", - "Surface training t=28280, loss=0.020784342661499977\n", - "Surface training t=28281, loss=0.02104559075087309\n", - "Surface training t=28282, loss=0.0209931256249547\n", - "Surface training t=28283, loss=0.023279568180441856\n", - "Surface training t=28284, loss=0.03295241296291351\n", - "Surface training t=28285, loss=0.02729714848101139\n", - "Surface training t=28286, loss=0.024835925549268723\n", - "Surface training t=28287, loss=0.02614910714328289\n", - "Surface training t=28288, loss=0.021951597183942795\n", - "Surface training t=28289, loss=0.02044228231534362\n", - "Surface training t=28290, loss=0.023631593212485313\n", - "Surface 
training t=28291, loss=0.01769786700606346\n", - "Surface training t=28292, loss=0.013889702968299389\n", - "Surface training t=28293, loss=0.013485280331224203\n", - "Surface training t=28294, loss=0.015465395990759134\n", - "Surface training t=28295, loss=0.01780630461871624\n", - "Surface training t=28296, loss=0.020150166004896164\n", - "Surface training t=28297, loss=0.016390109434723854\n", - "Surface training t=28298, loss=0.014321242459118366\n", - "Surface training t=28299, loss=0.020191308110952377\n", - "Surface training t=28300, loss=0.026871761307120323\n", - "Surface training t=28301, loss=0.021234050393104553\n", - "Surface training t=28302, loss=0.02377972099930048\n", - "Surface training t=28303, loss=0.020441566593945026\n", - "Surface training t=28304, loss=0.022100976668298244\n", - "Surface training t=28305, loss=0.015168569050729275\n", - "Surface training t=28306, loss=0.013039056677371264\n", - "Surface training t=28307, loss=0.01672506984323263\n", - "Surface training t=28308, loss=0.019537736661732197\n", - "Surface training t=28309, loss=0.02082937676459551\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=28310, loss=0.01853476744145155\n", - "Surface training t=28311, loss=0.015484113246202469\n", - "Surface training t=28312, loss=0.018447628244757652\n", - "Surface training t=28313, loss=0.01683131232857704\n", - "Surface training t=28314, loss=0.025900905951857567\n", - "Surface training t=28315, loss=0.021516570821404457\n", - "Surface training t=28316, loss=0.017614400014281273\n", - "Surface training t=28317, loss=0.019401113502681255\n", - "Surface training t=28318, loss=0.017683109734207392\n", - "Surface training t=28319, loss=0.020971516147255898\n", - "Surface training t=28320, loss=0.028836197219789028\n", - "Surface training t=28321, loss=0.028484473936259747\n", - "Surface training t=28322, loss=0.03944903239607811\n", - "Surface training t=28323, loss=0.034464627504348755\n", - "Surface training t=28324, loss=0.0520276203751564\n", - "Surface training t=28325, loss=0.035158151760697365\n", - "Surface training t=28326, loss=0.03842976875603199\n", - "Surface training t=28327, loss=0.05736688897013664\n", - "Surface training t=28328, loss=0.04727020673453808\n", - "Surface training t=28329, loss=0.031967622227966785\n", - "Surface training t=28330, loss=0.036672431975603104\n", - "Surface training t=28331, loss=0.038985397666692734\n", - "Surface training t=28332, loss=0.034161802381277084\n", - "Surface training t=28333, loss=0.049616387113928795\n", - "Surface training t=28334, loss=0.0343667846173048\n", - "Surface training t=28335, loss=0.03342633694410324\n", - "Surface training t=28336, loss=0.026420440524816513\n", - "Surface training t=28337, loss=0.030497911386191845\n", - "Surface training t=28338, loss=0.02424096129834652\n", - "Surface training t=28339, loss=0.0252313781529665\n", - "Surface training t=28340, loss=0.02851146273314953\n", - "Surface training t=28341, loss=0.02484966441988945\n", - "Surface training t=28342, loss=0.025908883661031723\n", - "Surface training t=28343, loss=0.019592037424445152\n", - "Surface training t=28344, loss=0.019822724629193544\n", - "Surface training t=28345, loss=0.015553733799606562\n", - "Surface training t=28346, loss=0.015212832018733025\n", - "Surface training t=28347, loss=0.016231045126914978\n", - "Surface training t=28348, loss=0.017451481893658638\n", - "Surface training t=28349, loss=0.02012163493782282\n", - "Surface training t=28350, 
loss=0.016116506420075893\n", - "Surface training t=28351, loss=0.01592866936698556\n", - "Surface training t=28352, loss=0.017186612356454134\n", - "Surface training t=28353, loss=0.013694771565496922\n", - "Surface training t=28354, loss=0.01209374750033021\n", - "Surface training t=28355, loss=0.02003301400691271\n", - "Surface training t=28356, loss=0.02193448133766651\n", - "Surface training t=28357, loss=0.02438763529062271\n", - "Surface training t=28358, loss=0.021111884154379368\n", - "Surface training t=28359, loss=0.023429126478731632\n", - "Surface training t=28360, loss=0.02010315004736185\n", - "Surface training t=28361, loss=0.025761726312339306\n", - "Surface training t=28362, loss=0.022360894829034805\n", - "Surface training t=28363, loss=0.017595461569726467\n", - "Surface training t=28364, loss=0.02323502954095602\n", - "Surface training t=28365, loss=0.023563041351735592\n", - "Surface training t=28366, loss=0.02034826949238777\n", - "Surface training t=28367, loss=0.029294014908373356\n", - "Surface training t=28368, loss=0.02224088180810213\n", - "Surface training t=28369, loss=0.020473352633416653\n", - "Surface training t=28370, loss=0.019538485445082188\n", - "Surface training t=28371, loss=0.019229162950068712\n", - "Surface training t=28372, loss=0.01753155142068863\n", - "Surface training t=28373, loss=0.019435344263911247\n", - "Surface training t=28374, loss=0.017254925332963467\n", - "Surface training t=28375, loss=0.023951304145157337\n", - "Surface training t=28376, loss=0.022679870948195457\n", - "Surface training t=28377, loss=0.019451061263680458\n", - "Surface training t=28378, loss=0.017549989745020866\n", - "Surface training t=28379, loss=0.021719125099480152\n", - "Surface training t=28380, loss=0.026478036306798458\n", - "Surface training t=28381, loss=0.027992148883640766\n", - "Surface training t=28382, loss=0.02203649841248989\n", - "Surface training t=28383, loss=0.023393561132252216\n", - "Surface training t=28384, loss=0.018031837418675423\n", - "Surface training t=28385, loss=0.020123645663261414\n", - "Surface training t=28386, loss=0.021633898839354515\n", - "Surface training t=28387, loss=0.020388804376125336\n", - "Surface training t=28388, loss=0.01746124681085348\n", - "Surface training t=28389, loss=0.02139161340892315\n", - "Surface training t=28390, loss=0.017567714676260948\n", - "Surface training t=28391, loss=0.01947413943707943\n", - "Surface training t=28392, loss=0.018898287788033485\n", - "Surface training t=28393, loss=0.02057700604200363\n", - "Surface training t=28394, loss=0.031547486782073975\n", - "Surface training t=28395, loss=0.022971787489950657\n", - "Surface training t=28396, loss=0.020293679554015398\n", - "Surface training t=28397, loss=0.026006050407886505\n", - "Surface training t=28398, loss=0.025181107223033905\n", - "Surface training t=28399, loss=0.023808257654309273\n", - "Surface training t=28400, loss=0.022136163897812366\n", - "Surface training t=28401, loss=0.023510907776653767\n", - "Surface training t=28402, loss=0.02082705870270729\n", - "Surface training t=28403, loss=0.022484260611236095\n", - "Surface training t=28404, loss=0.024911698885262012\n", - "Surface training t=28405, loss=0.02039658185094595\n", - "Surface training t=28406, loss=0.030866393819451332\n", - "Surface training t=28407, loss=0.025756445713341236\n", - "Surface training t=28408, loss=0.026477201841771603\n", - "Surface training t=28409, loss=0.0403993409126997\n", - "Surface training t=28410, loss=0.032350026071071625\n", - 
"Surface training t=28411, loss=0.047213878482580185\n", - "Surface training t=28412, loss=0.026812908239662647\n", - "Surface training t=28413, loss=0.03049214743077755\n", - "Surface training t=28414, loss=0.021233958192169666\n", - "Surface training t=28415, loss=0.028693951666355133\n", - "Surface training t=28416, loss=0.03596762474626303\n", - "Surface training t=28417, loss=0.031121359206736088\n", - "Surface training t=28418, loss=0.03108762949705124\n", - "Surface training t=28419, loss=0.029840661212801933\n", - "Surface training t=28420, loss=0.02647740300744772\n", - "Surface training t=28421, loss=0.025032101199030876\n", - "Surface training t=28422, loss=0.04249463975429535\n", - "Surface training t=28423, loss=0.02792319282889366\n", - "Surface training t=28424, loss=0.031828269362449646\n", - "Surface training t=28425, loss=0.033901821821928024\n", - "Surface training t=28426, loss=0.04365067556500435\n", - "Surface training t=28427, loss=0.032970258966088295\n", - "Surface training t=28428, loss=0.05222243256866932\n", - "Surface training t=28429, loss=0.035592799074947834\n", - "Surface training t=28430, loss=0.040998052805662155\n", - "Surface training t=28431, loss=0.026851296424865723\n", - "Surface training t=28432, loss=0.026315913535654545\n", - "Surface training t=28433, loss=0.029218324460089207\n", - "Surface training t=28434, loss=0.023172828368842602\n", - "Surface training t=28435, loss=0.02276693657040596\n", - "Surface training t=28436, loss=0.03364086244255304\n", - "Surface training t=28437, loss=0.021542957052588463\n", - "Surface training t=28438, loss=0.02221496682614088\n", - "Surface training t=28439, loss=0.024185599759221077\n", - "Surface training t=28440, loss=0.020242784172296524\n", - "Surface training t=28441, loss=0.022695918567478657\n", - "Surface training t=28442, loss=0.030872659757733345\n", - "Surface training t=28443, loss=0.034587275236845016\n", - "Surface training t=28444, loss=0.029810702428221703\n", - "Surface training t=28445, loss=0.03792236186563969\n", - "Surface training t=28446, loss=0.03363322652876377\n", - "Surface training t=28447, loss=0.024043606594204903\n", - "Surface training t=28448, loss=0.02098038326948881\n", - "Surface training t=28449, loss=0.01496011484414339\n", - "Surface training t=28450, loss=0.01869233138859272\n", - "Surface training t=28451, loss=0.020047961734235287\n", - "Surface training t=28452, loss=0.017869205214083195\n", - "Surface training t=28453, loss=0.020451738499104977\n", - "Surface training t=28454, loss=0.022306066006422043\n", - "Surface training t=28455, loss=0.0475953072309494\n", - "Surface training t=28456, loss=0.03373897913843393\n", - "Surface training t=28457, loss=0.04875354841351509\n", - "Surface training t=28458, loss=0.03990701027214527\n", - "Surface training t=28459, loss=0.029155105352401733\n", - "Surface training t=28460, loss=0.033434610813856125\n", - "Surface training t=28461, loss=0.025922427885234356\n", - "Surface training t=28462, loss=0.02268373593688011\n", - "Surface training t=28463, loss=0.018957090564072132\n", - "Surface training t=28464, loss=0.03410220704972744\n", - "Surface training t=28465, loss=0.029672810807824135\n", - "Surface training t=28466, loss=0.02935538161545992\n", - "Surface training t=28467, loss=0.026053478941321373\n", - "Surface training t=28468, loss=0.021578785963356495\n", - "Surface training t=28469, loss=0.04150952026247978\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=28470, 
loss=0.02973866742104292\n", - "Surface training t=28471, loss=0.03323042020201683\n", - "Surface training t=28472, loss=0.05920827388763428\n", - "Surface training t=28473, loss=0.0406525619328022\n", - "Surface training t=28474, loss=0.054452164098620415\n", - "Surface training t=28475, loss=0.032258568331599236\n", - "Surface training t=28476, loss=0.03173556178808212\n", - "Surface training t=28477, loss=0.025854192674160004\n", - "Surface training t=28478, loss=0.027741936966776848\n", - "Surface training t=28479, loss=0.03215243015438318\n", - "Surface training t=28480, loss=0.027224287390708923\n", - "Surface training t=28481, loss=0.02324963826686144\n", - "Surface training t=28482, loss=0.0371532142162323\n", - "Surface training t=28483, loss=0.027585445903241634\n", - "Surface training t=28484, loss=0.03119373507797718\n", - "Surface training t=28485, loss=0.03478255867958069\n", - "Surface training t=28486, loss=0.03882699832320213\n", - "Surface training t=28487, loss=0.03317393362522125\n", - "Surface training t=28488, loss=0.036253771744668484\n", - "Surface training t=28489, loss=0.04110804758965969\n", - "Surface training t=28490, loss=0.03278140351176262\n", - "Surface training t=28491, loss=0.040772564709186554\n", - "Surface training t=28492, loss=0.03139848727732897\n", - "Surface training t=28493, loss=0.03068238403648138\n", - "Surface training t=28494, loss=0.02796781901270151\n", - "Surface training t=28495, loss=0.029759323224425316\n", - "Surface training t=28496, loss=0.053461525589227676\n", - "Surface training t=28497, loss=0.042220307514071465\n", - "Surface training t=28498, loss=0.048304324969649315\n", - "Surface training t=28499, loss=0.042813559994101524\n", - "Surface training t=28500, loss=0.028513044118881226\n", - "Surface training t=28501, loss=0.027209972962737083\n", - "Surface training t=28502, loss=0.029701429419219494\n", - "Surface training t=28503, loss=0.023431084118783474\n", - "Surface training t=28504, loss=0.03174269385635853\n", - "Surface training t=28505, loss=0.02779859211295843\n", - "Surface training t=28506, loss=0.028369909152388573\n", - "Surface training t=28507, loss=0.03185711428523064\n", - "Surface training t=28508, loss=0.02775478083640337\n", - "Surface training t=28509, loss=0.02927605714648962\n", - "Surface training t=28510, loss=0.02513639535754919\n", - "Surface training t=28511, loss=0.026238450780510902\n", - "Surface training t=28512, loss=0.030567463487386703\n", - "Surface training t=28513, loss=0.030356640927493572\n", - "Surface training t=28514, loss=0.024444714188575745\n", - "Surface training t=28515, loss=0.03085981123149395\n", - "Surface training t=28516, loss=0.030199742875993252\n", - "Surface training t=28517, loss=0.028270638547837734\n", - "Surface training t=28518, loss=0.025052032433450222\n", - "Surface training t=28519, loss=0.02197373565286398\n", - "Surface training t=28520, loss=0.019929182715713978\n", - "Surface training t=28521, loss=0.02022435422986746\n", - "Surface training t=28522, loss=0.02186806220561266\n", - "Surface training t=28523, loss=0.021020307205617428\n", - "Surface training t=28524, loss=0.016429031267762184\n", - "Surface training t=28525, loss=0.021671727299690247\n", - "Surface training t=28526, loss=0.02042313478887081\n", - "Surface training t=28527, loss=0.021625771187245846\n", - "Surface training t=28528, loss=0.017847249284386635\n", - "Surface training t=28529, loss=0.015594058204442263\n", - "Surface training t=28530, loss=0.021118237636983395\n", - "Surface 
training t=28531, loss=0.01661355746909976\n", - "Surface training t=28532, loss=0.018498631194233894\n", - "Surface training t=28533, loss=0.01798613928258419\n", - "Surface training t=28534, loss=0.02036702260375023\n", - "Surface training t=28535, loss=0.02073767501860857\n", - "Surface training t=28536, loss=0.02285726275295019\n", - "Surface training t=28537, loss=0.015189331024885178\n", - "Surface training t=28538, loss=0.01918434351682663\n", - "Surface training t=28539, loss=0.020818124525249004\n", - "Surface training t=28540, loss=0.019976016134023666\n", - "Surface training t=28541, loss=0.020545051433146\n", - "Surface training t=28542, loss=0.022189217619597912\n", - "Surface training t=28543, loss=0.020230007357895374\n", - "Surface training t=28544, loss=0.02016584388911724\n", - "Surface training t=28545, loss=0.03164015430957079\n", - "Surface training t=28546, loss=0.019974862225353718\n", - "Surface training t=28547, loss=0.01942575629800558\n", - "Surface training t=28548, loss=0.02288288064301014\n", - "Surface training t=28549, loss=0.025820634327828884\n", - "Surface training t=28550, loss=0.027019910514354706\n", - "Surface training t=28551, loss=0.03919630404561758\n", - "Surface training t=28552, loss=0.026218934915959835\n", - "Surface training t=28553, loss=0.0319405235350132\n", - "Surface training t=28554, loss=0.027740041725337505\n", - "Surface training t=28555, loss=0.028557488694787025\n", - "Surface training t=28556, loss=0.024416528642177582\n", - "Surface training t=28557, loss=0.0239772479981184\n", - "Surface training t=28558, loss=0.02914977353066206\n", - "Surface training t=28559, loss=0.02500003296881914\n", - "Surface training t=28560, loss=0.02558781113475561\n", - "Surface training t=28561, loss=0.0339966993778944\n", - "Surface training t=28562, loss=0.030509168282151222\n", - "Surface training t=28563, loss=0.031129310838878155\n", - "Surface training t=28564, loss=0.02564880345016718\n", - "Surface training t=28565, loss=0.02661379985511303\n", - "Surface training t=28566, loss=0.03243883699178696\n", - "Surface training t=28567, loss=0.02421450801193714\n", - "Surface training t=28568, loss=0.027796950191259384\n", - "Surface training t=28569, loss=0.028433951549232006\n", - "Surface training t=28570, loss=0.023204000666737556\n", - "Surface training t=28571, loss=0.023034507408738136\n", - "Surface training t=28572, loss=0.02219441719353199\n", - "Surface training t=28573, loss=0.01756592048332095\n", - "Surface training t=28574, loss=0.02428367454558611\n", - "Surface training t=28575, loss=0.01735421922057867\n", - "Surface training t=28576, loss=0.02197299338877201\n", - "Surface training t=28577, loss=0.016332055442035198\n", - "Surface training t=28578, loss=0.021317433565855026\n", - "Surface training t=28579, loss=0.01427910290658474\n", - "Surface training t=28580, loss=0.021110319532454014\n", - "Surface training t=28581, loss=0.01852534618228674\n", - "Surface training t=28582, loss=0.023227084428071976\n", - "Surface training t=28583, loss=0.017871562391519547\n", - "Surface training t=28584, loss=0.020420292858034372\n", - "Surface training t=28585, loss=0.018073623068630695\n", - "Surface training t=28586, loss=0.02296322863548994\n", - "Surface training t=28587, loss=0.03229255694895983\n", - "Surface training t=28588, loss=0.02404477633535862\n", - "Surface training t=28589, loss=0.030169066973030567\n", - "Surface training t=28590, loss=0.02055539097636938\n", - "Surface training t=28591, loss=0.024304254911839962\n", - 
"Surface training t=28592, loss=0.02904967125505209\n", - "Surface training t=28593, loss=0.023440267890691757\n", - "Surface training t=28594, loss=0.024114505387842655\n", - "Surface training t=28595, loss=0.014867340214550495\n", - "Surface training t=28596, loss=0.01700026262551546\n", - "Surface training t=28597, loss=0.014462689869105816\n", - "Surface training t=28598, loss=0.019206794910132885\n", - "Surface training t=28599, loss=0.022754136472940445\n", - "Surface training t=28600, loss=0.024139302782714367\n", - "Surface training t=28601, loss=0.023968755267560482\n", - "Surface training t=28602, loss=0.03482868615537882\n", - "Surface training t=28603, loss=0.028522025793790817\n", - "Surface training t=28604, loss=0.028863387182354927\n", - "Surface training t=28605, loss=0.036619171500205994\n", - "Surface training t=28606, loss=0.03537929058074951\n", - "Surface training t=28607, loss=0.029027795419096947\n", - "Surface training t=28608, loss=0.03130309842526913\n", - "Surface training t=28609, loss=0.041990939527750015\n", - "Surface training t=28610, loss=0.026738861575722694\n", - "Surface training t=28611, loss=0.03772137127816677\n", - "Surface training t=28612, loss=0.028590538538992405\n", - "Surface training t=28613, loss=0.03318948484957218\n", - "Surface training t=28614, loss=0.025553010404109955\n", - "Surface training t=28615, loss=0.028574266470968723\n", - "Surface training t=28616, loss=0.024182969704270363\n", - "Surface training t=28617, loss=0.021238891407847404\n", - "Surface training t=28618, loss=0.022120701149106026\n", - "Surface training t=28619, loss=0.024586222134530544\n", - "Surface training t=28620, loss=0.023149192333221436\n", - "Surface training t=28621, loss=0.01817223709076643\n", - "Surface training t=28622, loss=0.021354641765356064\n", - "Surface training t=28623, loss=0.020012281835079193\n", - "Surface training t=28624, loss=0.015164298936724663\n", - "Surface training t=28625, loss=0.018923611380159855\n", - "Surface training t=28626, loss=0.020310314372181892\n", - "Surface training t=28627, loss=0.015328174456954002\n", - "Surface training t=28628, loss=0.018873419612646103\n", - "Surface training t=28629, loss=0.015090267639607191\n", - "Surface training t=28630, loss=0.018158809281885624\n", - "Surface training t=28631, loss=0.016571802087128162\n", - "Surface training t=28632, loss=0.01736746635288\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=28633, loss=0.01472760597243905\n", - "Surface training t=28634, loss=0.013897622935473919\n", - "Surface training t=28635, loss=0.014809418469667435\n", - "Surface training t=28636, loss=0.018754920922219753\n", - "Surface training t=28637, loss=0.01642677653580904\n", - "Surface training t=28638, loss=0.019275803118944168\n", - "Surface training t=28639, loss=0.027603772468864918\n", - "Surface training t=28640, loss=0.022278508637100458\n", - "Surface training t=28641, loss=0.02277978789061308\n", - "Surface training t=28642, loss=0.026520922780036926\n", - "Surface training t=28643, loss=0.02151307463645935\n", - "Surface training t=28644, loss=0.019120008684694767\n", - "Surface training t=28645, loss=0.02167478296905756\n", - "Surface training t=28646, loss=0.019467527978122234\n", - "Surface training t=28647, loss=0.017892442643642426\n", - "Surface training t=28648, loss=0.015109050087630749\n", - "Surface training t=28649, loss=0.014599665999412537\n", - "Surface training t=28650, loss=0.01361848646774888\n", - "Surface training 
t=28651, loss=0.013580809812992811\n", - "Surface training t=28652, loss=0.017909192480146885\n", - "Surface training t=28653, loss=0.01977213053032756\n", - "Surface training t=28654, loss=0.024286245461553335\n", - "Surface training t=28655, loss=0.023285096511244774\n", - "Surface training t=28656, loss=0.020694509148597717\n", - "Surface training t=28657, loss=0.02116505056619644\n", - "Surface training t=28658, loss=0.025761038064956665\n", - "Surface training t=28659, loss=0.02242252044379711\n", - "Surface training t=28660, loss=0.021082296036183834\n", - "Surface training t=28661, loss=0.02616732381284237\n", - "Surface training t=28662, loss=0.024460296146571636\n", - "Surface training t=28663, loss=0.02734267618507147\n", - "Surface training t=28664, loss=0.020858388859778643\n", - "Surface training t=28665, loss=0.031089761294424534\n", - "Surface training t=28666, loss=0.024675600230693817\n", - "Surface training t=28667, loss=0.019197425805032253\n", - "Surface training t=28668, loss=0.018150226678699255\n", - "Surface training t=28669, loss=0.020152038894593716\n", - "Surface training t=28670, loss=0.02518480271100998\n", - "Surface training t=28671, loss=0.023429205641150475\n", - "Surface training t=28672, loss=0.029023428447544575\n", - "Surface training t=28673, loss=0.025753919035196304\n", - "Surface training t=28674, loss=0.023783556185662746\n", - "Surface training t=28675, loss=0.02415228821337223\n", - "Surface training t=28676, loss=0.02555968053638935\n", - "Surface training t=28677, loss=0.020637065172195435\n", - "Surface training t=28678, loss=0.020074542611837387\n", - "Surface training t=28679, loss=0.02148884069174528\n", - "Surface training t=28680, loss=0.0153060806915164\n", - "Surface training t=28681, loss=0.017812975216656923\n", - "Surface training t=28682, loss=0.015518495347350836\n", - "Surface training t=28683, loss=0.015922699123620987\n", - "Surface training t=28684, loss=0.017007476650178432\n", - "Surface training t=28685, loss=0.022372011095285416\n", - "Surface training t=28686, loss=0.015178636647760868\n", - "Surface training t=28687, loss=0.01934704091399908\n", - "Surface training t=28688, loss=0.01802152954041958\n", - "Surface training t=28689, loss=0.03424214757978916\n", - "Surface training t=28690, loss=0.027960054576396942\n", - "Surface training t=28691, loss=0.040240734815597534\n", - "Surface training t=28692, loss=0.024458741769194603\n", - "Surface training t=28693, loss=0.023664986714720726\n", - "Surface training t=28694, loss=0.015480916947126389\n", - "Surface training t=28695, loss=0.017183910124003887\n", - "Surface training t=28696, loss=0.015729650389403105\n", - "Surface training t=28697, loss=0.015736148227006197\n", - "Surface training t=28698, loss=0.011628500651568174\n", - "Surface training t=28699, loss=0.020013801753520966\n", - "Surface training t=28700, loss=0.01597942877560854\n", - "Surface training t=28701, loss=0.016469918191432953\n", - "Surface training t=28702, loss=0.015170364174991846\n", - "Surface training t=28703, loss=0.016385957598686218\n", - "Surface training t=28704, loss=0.022333993576467037\n", - "Surface training t=28705, loss=0.019905644468963146\n", - "Surface training t=28706, loss=0.01618623360991478\n", - "Surface training t=28707, loss=0.023643736727535725\n", - "Surface training t=28708, loss=0.04159179888665676\n", - "Surface training t=28709, loss=0.029481614008545876\n", - "Surface training t=28710, loss=0.027924579568207264\n", - "Surface training t=28711, 
loss=0.02523474022746086\n", - "Surface training t=28712, loss=0.030725307762622833\n", - "Surface training t=28713, loss=0.029041406698524952\n", - "Surface training t=28714, loss=0.02619022596627474\n", - "Surface training t=28715, loss=0.027008600998669863\n", - "Surface training t=28716, loss=0.041377509012818336\n", - "Surface training t=28717, loss=0.028252776712179184\n", - "Surface training t=28718, loss=0.032028255984187126\n", - "Surface training t=28719, loss=0.030292819254100323\n", - "Surface training t=28720, loss=0.03742188960313797\n", - "Surface training t=28721, loss=0.0301689263433218\n", - "Surface training t=28722, loss=0.036402798257768154\n", - "Surface training t=28723, loss=0.03345274366438389\n", - "Surface training t=28724, loss=0.025235358625650406\n", - "Surface training t=28725, loss=0.02824537828564644\n", - "Surface training t=28726, loss=0.023236708715558052\n", - "Surface training t=28727, loss=0.02742371056228876\n", - "Surface training t=28728, loss=0.029165362007915974\n", - "Surface training t=28729, loss=0.026815312914550304\n", - "Surface training t=28730, loss=0.026194454170763493\n", - "Surface training t=28731, loss=0.023146926425397396\n", - "Surface training t=28732, loss=0.01941586285829544\n", - "Surface training t=28733, loss=0.017556891310960054\n", - "Surface training t=28734, loss=0.016780005767941475\n", - "Surface training t=28735, loss=0.021989237517118454\n", - "Surface training t=28736, loss=0.020858964882791042\n", - "Surface training t=28737, loss=0.019291997887194157\n", - "Surface training t=28738, loss=0.016718525905162096\n", - "Surface training t=28739, loss=0.01714777061715722\n", - "Surface training t=28740, loss=0.01849451381713152\n", - "Surface training t=28741, loss=0.016878126189112663\n", - "Surface training t=28742, loss=0.02039286307990551\n", - "Surface training t=28743, loss=0.02072046510875225\n", - "Surface training t=28744, loss=0.020211869850754738\n", - "Surface training t=28745, loss=0.01689011324197054\n", - "Surface training t=28746, loss=0.014835386537015438\n", - "Surface training t=28747, loss=0.02064063586294651\n", - "Surface training t=28748, loss=0.01583193289116025\n", - "Surface training t=28749, loss=0.0154852494597435\n", - "Surface training t=28750, loss=0.01480346405878663\n", - "Surface training t=28751, loss=0.020569725893437862\n", - "Surface training t=28752, loss=0.022198928520083427\n", - "Surface training t=28753, loss=0.03179384209215641\n", - "Surface training t=28754, loss=0.0274944631382823\n", - "Surface training t=28755, loss=0.02343053836375475\n", - "Surface training t=28756, loss=0.027734791859984398\n", - "Surface training t=28757, loss=0.028732172213494778\n", - "Surface training t=28758, loss=0.03046286478638649\n", - "Surface training t=28759, loss=0.03361563477665186\n", - "Surface training t=28760, loss=0.0410242173820734\n", - "Surface training t=28761, loss=0.02831463050097227\n", - "Surface training t=28762, loss=0.02834676019847393\n", - "Surface training t=28763, loss=0.02926216647028923\n", - "Surface training t=28764, loss=0.03174104169011116\n", - "Surface training t=28765, loss=0.024060195311903954\n", - "Surface training t=28766, loss=0.02313469909131527\n", - "Surface training t=28767, loss=0.025773138739168644\n", - "Surface training t=28768, loss=0.02443073969334364\n", - "Surface training t=28769, loss=0.019431869499385357\n", - "Surface training t=28770, loss=0.02854607254266739\n", - "Surface training t=28771, loss=0.023742498829960823\n", - "Surface 
training t=28772, loss=0.02326162066310644\n", - "Surface training t=28773, loss=0.01744471862912178\n", - "Surface training t=28774, loss=0.020019972696900368\n", - "Surface training t=28775, loss=0.020115974359214306\n", - "Surface training t=28776, loss=0.02164110727608204\n", - "Surface training t=28777, loss=0.017308940645307302\n", - "Surface training t=28778, loss=0.021974864415824413\n", - "Surface training t=28779, loss=0.016820174641907215\n", - "Surface training t=28780, loss=0.019977744668722153\n", - "Surface training t=28781, loss=0.024983711540699005\n", - "Surface training t=28782, loss=0.026373582892119884\n", - "Surface training t=28783, loss=0.02009469550102949\n", - "Surface training t=28784, loss=0.02372752409428358\n", - "Surface training t=28785, loss=0.022310370579361916\n", - "Surface training t=28786, loss=0.019300821237266064\n", - "Surface training t=28787, loss=0.018485432490706444\n", - "Surface training t=28788, loss=0.019559200387448072\n", - "Surface training t=28789, loss=0.01860159169882536\n", - "Surface training t=28790, loss=0.019489172846078873\n", - "Surface training t=28791, loss=0.028230033814907074\n", - "Surface training t=28792, loss=0.021514147520065308\n", - "Surface training t=28793, loss=0.029587802477180958\n", - "Surface training t=28794, loss=0.02923878002911806\n", - "Surface training t=28795, loss=0.024199373088777065\n", - "Surface training t=28796, loss=0.02316349372267723\n", - "Surface training t=28797, loss=0.02184141520410776\n", - "Surface training t=28798, loss=0.0217899177223444\n", - "Surface training t=28799, loss=0.021828167140483856\n", - "Surface training t=28800, loss=0.02061367593705654\n", - "Surface training t=28801, loss=0.02899410855025053\n", - "Surface training t=28802, loss=0.03189657162874937\n", - "Surface training t=28803, loss=0.02810358814895153\n", - "Surface training t=28804, loss=0.02425932139158249\n", - "Surface training t=28805, loss=0.025179386138916016\n", - "Surface training t=28806, loss=0.024387115612626076\n", - "Surface training t=28807, loss=0.024596462957561016\n", - "Surface training t=28808, loss=0.03267845697700977\n", - "Surface training t=28809, loss=0.025857675820589066\n", - "Surface training t=28810, loss=0.02798400167375803\n", - "Surface training t=28811, loss=0.024421782232820988\n", - "Surface training t=28812, loss=0.01823372906073928\n", - "Surface training t=28813, loss=0.029410372488200665\n", - "Surface training t=28814, loss=0.02673538215458393\n", - "Surface training t=28815, loss=0.02424589730799198\n", - "Surface training t=28816, loss=0.01981656439602375\n", - "Surface training t=28817, loss=0.029344402253627777\n", - "Surface training t=28818, loss=0.023557888343930244\n", - "Surface training t=28819, loss=0.029715897515416145\n", - "Surface training t=28820, loss=0.028316041454672813\n", - "Surface training t=28821, loss=0.018699723295867443\n", - "Surface training t=28822, loss=0.017502466216683388\n", - "Surface training t=28823, loss=0.02068777848035097\n", - "Surface training t=28824, loss=0.02141114417463541\n", - "Surface training t=28825, loss=0.016785511281341314\n", - "Surface training t=28826, loss=0.0204333933070302\n", - "Surface training t=28827, loss=0.018437087535858154\n", - "Surface training t=28828, loss=0.014777667354792356\n", - "Surface training t=28829, loss=0.01700802706182003\n", - "Surface training t=28830, loss=0.017881841398775578\n", - "Surface training t=28831, loss=0.02023445814847946\n", - "Surface training t=28832, 
loss=0.017659086268395185\n", - "Surface training t=28833, loss=0.020959454588592052\n", - "Surface training t=28834, loss=0.01440106425434351\n", - "Surface training t=28835, loss=0.019528976641595364\n", - "Surface training t=28836, loss=0.01632807496935129\n", - "Surface training t=28837, loss=0.020274344831705093\n", - "Surface training t=28838, loss=0.026271960698068142\n", - "Surface training t=28839, loss=0.02126418985426426\n", - "Surface training t=28840, loss=0.02788092289119959\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=28841, loss=0.02883092127740383\n", - "Surface training t=28842, loss=0.02198103442788124\n", - "Surface training t=28843, loss=0.02390776202082634\n", - "Surface training t=28844, loss=0.03056465182453394\n", - "Surface training t=28845, loss=0.01875076349824667\n", - "Surface training t=28846, loss=0.021418257616460323\n", - "Surface training t=28847, loss=0.02454329188913107\n", - "Surface training t=28848, loss=0.019630645401775837\n", - "Surface training t=28849, loss=0.019853732082992792\n", - "Surface training t=28850, loss=0.018399087712168694\n", - "Surface training t=28851, loss=0.017840920016169548\n", - "Surface training t=28852, loss=0.02012085821479559\n", - "Surface training t=28853, loss=0.02268061973154545\n", - "Surface training t=28854, loss=0.019845749251544476\n", - "Surface training t=28855, loss=0.01911738980561495\n", - "Surface training t=28856, loss=0.018829562701284885\n", - "Surface training t=28857, loss=0.018369009718298912\n", - "Surface training t=28858, loss=0.017531585413962603\n", - "Surface training t=28859, loss=0.018977747298777103\n", - "Surface training t=28860, loss=0.019316932186484337\n", - "Surface training t=28861, loss=0.018335613422095776\n", - "Surface training t=28862, loss=0.013581760227680206\n", - "Surface training t=28863, loss=0.015225476119667292\n", - "Surface training t=28864, loss=0.013333756942301989\n", - "Surface training t=28865, loss=0.019730282947421074\n", - "Surface training t=28866, loss=0.018989264965057373\n", - "Surface training t=28867, loss=0.020569021813571453\n", - "Surface training t=28868, loss=0.021701590158045292\n", - "Surface training t=28869, loss=0.023320446722209454\n", - "Surface training t=28870, loss=0.022225501015782356\n", - "Surface training t=28871, loss=0.017574114724993706\n", - "Surface training t=28872, loss=0.020734231919050217\n", - "Surface training t=28873, loss=0.021418355405330658\n", - "Surface training t=28874, loss=0.02087731473147869\n", - "Surface training t=28875, loss=0.01700648618862033\n", - "Surface training t=28876, loss=0.0178216565400362\n", - "Surface training t=28877, loss=0.019608101807534695\n", - "Surface training t=28878, loss=0.018233872018754482\n", - "Surface training t=28879, loss=0.022477074526250362\n", - "Surface training t=28880, loss=0.024627002887427807\n", - "Surface training t=28881, loss=0.02549657318741083\n", - "Surface training t=28882, loss=0.0265802089124918\n", - "Surface training t=28883, loss=0.02933629509061575\n", - "Surface training t=28884, loss=0.024458744563162327\n", - "Surface training t=28885, loss=0.032593647949397564\n", - "Surface training t=28886, loss=0.026801890693604946\n", - "Surface training t=28887, loss=0.024499036371707916\n", - "Surface training t=28888, loss=0.025163549929857254\n", - "Surface training t=28889, loss=0.02502670604735613\n", - "Surface training t=28890, loss=0.0219503752887249\n", - "Surface training t=28891, 
loss=0.01700450386852026\n", - "Surface training t=28892, loss=0.021062754094600677\n", - "Surface training t=28893, loss=0.014598532114177942\n", - "Surface training t=28894, loss=0.017580529674887657\n", - "Surface training t=28895, loss=0.01952362898737192\n", - "Surface training t=28896, loss=0.015656289644539356\n", - "Surface training t=28897, loss=0.020536082796752453\n", - "Surface training t=28898, loss=0.02243290189653635\n", - "Surface training t=28899, loss=0.02904733270406723\n", - "Surface training t=28900, loss=0.022120478563010693\n", - "Surface training t=28901, loss=0.019015426747500896\n", - "Surface training t=28902, loss=0.018364201299846172\n", - "Surface training t=28903, loss=0.017638548277318478\n", - "Surface training t=28904, loss=0.022710010409355164\n", - "Surface training t=28905, loss=0.022225075401365757\n", - "Surface training t=28906, loss=0.01970438566058874\n", - "Surface training t=28907, loss=0.018076550215482712\n", - "Surface training t=28908, loss=0.017382608260959387\n", - "Surface training t=28909, loss=0.025586330331861973\n", - "Surface training t=28910, loss=0.023189784958958626\n", - "Surface training t=28911, loss=0.02021990856155753\n", - "Surface training t=28912, loss=0.018600427079945803\n", - "Surface training t=28913, loss=0.021168794482946396\n", - "Surface training t=28914, loss=0.01643923856317997\n", - "Surface training t=28915, loss=0.017932779155671597\n", - "Surface training t=28916, loss=0.024776059202849865\n", - "Surface training t=28917, loss=0.02629140019416809\n", - "Surface training t=28918, loss=0.021150107495486736\n", - "Surface training t=28919, loss=0.02497540321201086\n", - "Surface training t=28920, loss=0.026752776466310024\n", - "Surface training t=28921, loss=0.02602919563651085\n", - "Surface training t=28922, loss=0.029367961920797825\n", - "Surface training t=28923, loss=0.026333915069699287\n", - "Surface training t=28924, loss=0.022028453648090363\n", - "Surface training t=28925, loss=0.024364996701478958\n", - "Surface training t=28926, loss=0.02450737915933132\n", - "Surface training t=28927, loss=0.03398304991424084\n", - "Surface training t=28928, loss=0.03133765794336796\n", - "Surface training t=28929, loss=0.029041379690170288\n", - "Surface training t=28930, loss=0.023653510957956314\n", - "Surface training t=28931, loss=0.02519781980663538\n", - "Surface training t=28932, loss=0.0234657796099782\n", - "Surface training t=28933, loss=0.021364656277000904\n", - "Surface training t=28934, loss=0.029622036963701248\n", - "Surface training t=28935, loss=0.0205538934096694\n", - "Surface training t=28936, loss=0.024086695164442062\n", - "Surface training t=28937, loss=0.04869600757956505\n", - "Surface training t=28938, loss=0.031812150962650776\n", - "Surface training t=28939, loss=0.04332895949482918\n", - "Surface training t=28940, loss=0.038320668041706085\n", - "Surface training t=28941, loss=0.026205839589238167\n", - "Surface training t=28942, loss=0.028117189183831215\n", - "Surface training t=28943, loss=0.02854194026440382\n", - "Surface training t=28944, loss=0.045369165018200874\n", - "Surface training t=28945, loss=0.02915758080780506\n", - "Surface training t=28946, loss=0.03483576141297817\n", - "Surface training t=28947, loss=0.02446893509477377\n", - "Surface training t=28948, loss=0.016818768810480833\n", - "Surface training t=28949, loss=0.02050791308283806\n", - "Surface training t=28950, loss=0.018188880756497383\n", - "Surface training t=28951, loss=0.01732589164748788\n", - "Surface 
training t=28952, loss=0.01742196176201105\n", - "Surface training t=28953, loss=0.014187021180987358\n", - "Surface training t=28954, loss=0.013462575618177652\n", - "Surface training t=28955, loss=0.01559570711106062\n", - "Surface training t=28956, loss=0.01366403279826045\n", - "Surface training t=28957, loss=0.014441362116485834\n", - "Surface training t=28958, loss=0.01651781238615513\n", - "Surface training t=28959, loss=0.015161244198679924\n", - "Surface training t=28960, loss=0.018048147205263376\n", - "Surface training t=28961, loss=0.016576132737100124\n", - "Surface training t=28962, loss=0.02018397720530629\n", - "Surface training t=28963, loss=0.0205221064388752\n", - "Surface training t=28964, loss=0.024473993107676506\n", - "Surface training t=28965, loss=0.021146065555512905\n", - "Surface training t=28966, loss=0.02319229021668434\n", - "Surface training t=28967, loss=0.02831050008535385\n", - "Surface training t=28968, loss=0.021363914478570223\n", - "Surface training t=28969, loss=0.03179134614765644\n", - "Surface training t=28970, loss=0.028797121718525887\n", - "Surface training t=28971, loss=0.026906350627541542\n", - "Surface training t=28972, loss=0.02887977473437786\n", - "Surface training t=28973, loss=0.023250059224665165\n", - "Surface training t=28974, loss=0.03166938293725252\n", - "Surface training t=28975, loss=0.028340721502900124\n", - "Surface training t=28976, loss=0.028005415573716164\n", - "Surface training t=28977, loss=0.025534235406666994\n", - "Surface training t=28978, loss=0.029161717742681503\n", - "Surface training t=28979, loss=0.042498474940657616\n", - "Surface training t=28980, loss=0.028495926409959793\n", - "Surface training t=28981, loss=0.034326525405049324\n", - "Surface training t=28982, loss=0.03026292659342289\n", - "Surface training t=28983, loss=0.03169596567749977\n", - "Surface training t=28984, loss=0.02642642054706812\n", - "Surface training t=28985, loss=0.02460915595293045\n", - "Surface training t=28986, loss=0.023667446337640285\n", - "Surface training t=28987, loss=0.018627166748046875\n", - "Surface training t=28988, loss=0.016595509834587574\n", - "Surface training t=28989, loss=0.020105899311602116\n", - "Surface training t=28990, loss=0.020076075568795204\n", - "Surface training t=28991, loss=0.01841938216239214\n", - "Surface training t=28992, loss=0.01878813561052084\n", - "Surface training t=28993, loss=0.016288040205836296\n", - "Surface training t=28994, loss=0.019725517369806767\n", - "Surface training t=28995, loss=0.02965123299509287\n", - "Surface training t=28996, loss=0.024747767485678196\n", - "Surface training t=28997, loss=0.021781429648399353\n", - "Surface training t=28998, loss=0.022513565607368946\n", - "Surface training t=28999, loss=0.027518387883901596\n", - "Surface training t=29000, loss=0.02601041179150343\n", - "Surface training t=29001, loss=0.021231855265796185\n", - "Surface training t=29002, loss=0.021035823971033096\n", - "Surface training t=29003, loss=0.023322278633713722\n", - "Surface training t=29004, loss=0.021750114858150482\n", - "Surface training t=29005, loss=0.020877010188996792\n", - "Surface training t=29006, loss=0.021619701758027077\n", - "Surface training t=29007, loss=0.023700899444520473\n", - "Surface training t=29008, loss=0.022101880982518196\n", - "Surface training t=29009, loss=0.02399952244013548\n", - "Surface training t=29010, loss=0.018945744261145592\n", - "Surface training t=29011, loss=0.025493111461400986\n", - "Surface training t=29012, 
[... removed notebook cell output elided: ~1,200 repeated `"Surface training t=<step>, loss=<value>"` stdout lines for steps 29012–30212, loss values fluctuating in the ~0.012–0.053 range ...]
training t=30213, loss=0.0181210208684206\n", - "Surface training t=30214, loss=0.01804128661751747\n", - "Surface training t=30215, loss=0.017196201719343662\n", - "Surface training t=30216, loss=0.015662228222936392\n", - "Surface training t=30217, loss=0.019243518821895123\n", - "Surface training t=30218, loss=0.02025447692722082\n", - "Surface training t=30219, loss=0.015877728816121817\n", - "Surface training t=30220, loss=0.016834781505167484\n", - "Surface training t=30221, loss=0.020500591956079006\n", - "Surface training t=30222, loss=0.01679364126175642\n", - "Surface training t=30223, loss=0.014803662896156311\n", - "Surface training t=30224, loss=0.013795065693557262\n", - "Surface training t=30225, loss=0.01784153562039137\n", - "Surface training t=30226, loss=0.020948628429323435\n", - "Surface training t=30227, loss=0.018319767899811268\n", - "Surface training t=30228, loss=0.026581693440675735\n", - "Surface training t=30229, loss=0.018649731762707233\n", - "Surface training t=30230, loss=0.019411936402320862\n", - "Surface training t=30231, loss=0.01519564026966691\n", - "Surface training t=30232, loss=0.023018358275294304\n", - "Surface training t=30233, loss=0.01790808606892824\n", - "Surface training t=30234, loss=0.018248924985527992\n", - "Surface training t=30235, loss=0.01858637109398842\n", - "Surface training t=30236, loss=0.022444315254688263\n", - "Surface training t=30237, loss=0.022405878640711308\n", - "Surface training t=30238, loss=0.018499448895454407\n", - "Surface training t=30239, loss=0.018348570447415113\n", - "Surface training t=30240, loss=0.024542628787457943\n", - "Surface training t=30241, loss=0.034406282007694244\n", - "Surface training t=30242, loss=0.031788005493581295\n", - "Surface training t=30243, loss=0.028325458988547325\n", - "Surface training t=30244, loss=0.034543175250291824\n", - "Surface training t=30245, loss=0.03342744708061218\n", - "Surface training t=30246, loss=0.02738950587809086\n", - "Surface training t=30247, loss=0.02821602113544941\n", - "Surface training t=30248, loss=0.029483179561793804\n", - "Surface training t=30249, loss=0.02845163643360138\n", - "Surface training t=30250, loss=0.02345473598688841\n", - "Surface training t=30251, loss=0.022569100372493267\n", - "Surface training t=30252, loss=0.0276763578876853\n", - "Surface training t=30253, loss=0.025665512308478355\n", - "Surface training t=30254, loss=0.02726823091506958\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=30255, loss=0.024767662398517132\n", - "Surface training t=30256, loss=0.020433559082448483\n", - "Surface training t=30257, loss=0.01760731264948845\n", - "Surface training t=30258, loss=0.017924470361322165\n", - "Surface training t=30259, loss=0.015080548357218504\n", - "Surface training t=30260, loss=0.016705811955034733\n", - "Surface training t=30261, loss=0.015264589805155993\n", - "Surface training t=30262, loss=0.016447209287434816\n", - "Surface training t=30263, loss=0.014977718237787485\n", - "Surface training t=30264, loss=0.015434719622135162\n", - "Surface training t=30265, loss=0.01987127587199211\n", - "Surface training t=30266, loss=0.016954248305410147\n", - "Surface training t=30267, loss=0.02245166338980198\n", - "Surface training t=30268, loss=0.03115004301071167\n", - "Surface training t=30269, loss=0.020067655481398106\n", - "Surface training t=30270, loss=0.02890156302601099\n", - "Surface training t=30271, loss=0.019489459227770567\n", - "Surface training t=30272, 
loss=0.025641875341534615\n", - "Surface training t=30273, loss=0.02837575227022171\n", - "Surface training t=30274, loss=0.0322549669072032\n", - "Surface training t=30275, loss=0.03275369945913553\n", - "Surface training t=30276, loss=0.026061322540044785\n", - "Surface training t=30277, loss=0.02343699149787426\n", - "Surface training t=30278, loss=0.022319745272397995\n", - "Surface training t=30279, loss=0.021894452162086964\n", - "Surface training t=30280, loss=0.021009432151913643\n", - "Surface training t=30281, loss=0.020227530039846897\n", - "Surface training t=30282, loss=0.023190980777144432\n", - "Surface training t=30283, loss=0.018586162012070417\n", - "Surface training t=30284, loss=0.025667348876595497\n", - "Surface training t=30285, loss=0.018072500824928284\n", - "Surface training t=30286, loss=0.027356602251529694\n", - "Surface training t=30287, loss=0.019473527558147907\n", - "Surface training t=30288, loss=0.024035231210291386\n", - "Surface training t=30289, loss=0.027668505907058716\n", - "Surface training t=30290, loss=0.0236518457531929\n", - "Surface training t=30291, loss=0.0244386475533247\n", - "Surface training t=30292, loss=0.04424732364714146\n", - "Surface training t=30293, loss=0.02741921693086624\n", - "Surface training t=30294, loss=0.027135031297802925\n", - "Surface training t=30295, loss=0.02970596682280302\n", - "Surface training t=30296, loss=0.025326097384095192\n", - "Surface training t=30297, loss=0.023243378847837448\n", - "Surface training t=30298, loss=0.016997373662889004\n", - "Surface training t=30299, loss=0.018193554133176804\n", - "Surface training t=30300, loss=0.02084594313055277\n", - "Surface training t=30301, loss=0.028769508004188538\n", - "Surface training t=30302, loss=0.023036371916532516\n", - "Surface training t=30303, loss=0.027847088873386383\n", - "Surface training t=30304, loss=0.023002672009170055\n", - "Surface training t=30305, loss=0.02203697105869651\n", - "Surface training t=30306, loss=0.027588012628257275\n", - "Surface training t=30307, loss=0.016985359601676464\n", - "Surface training t=30308, loss=0.024464777670800686\n", - "Surface training t=30309, loss=0.02113099955022335\n", - "Surface training t=30310, loss=0.018752921372652054\n", - "Surface training t=30311, loss=0.021460979245603085\n", - "Surface training t=30312, loss=0.019586590118706226\n", - "Surface training t=30313, loss=0.020058623515069485\n", - "Surface training t=30314, loss=0.021525805816054344\n", - "Surface training t=30315, loss=0.01811946928501129\n", - "Surface training t=30316, loss=0.01913685631006956\n", - "Surface training t=30317, loss=0.02180527988821268\n", - "Surface training t=30318, loss=0.024725121445953846\n", - "Surface training t=30319, loss=0.02053782856091857\n", - "Surface training t=30320, loss=0.021076440811157227\n", - "Surface training t=30321, loss=0.022037615068256855\n", - "Surface training t=30322, loss=0.01979187037795782\n", - "Surface training t=30323, loss=0.019685188308358192\n", - "Surface training t=30324, loss=0.03214937634766102\n", - "Surface training t=30325, loss=0.023386275395751\n", - "Surface training t=30326, loss=0.019882965832948685\n", - "Surface training t=30327, loss=0.020132120233029127\n", - "Surface training t=30328, loss=0.027854792773723602\n", - "Surface training t=30329, loss=0.02432952355593443\n", - "Surface training t=30330, loss=0.02100077737122774\n", - "Surface training t=30331, loss=0.02939605712890625\n", - "Surface training t=30332, loss=0.02311095781624317\n", - "Surface 
training t=30333, loss=0.02284516766667366\n", - "Surface training t=30334, loss=0.01786146964877844\n", - "Surface training t=30335, loss=0.016299735754728317\n", - "Surface training t=30336, loss=0.011743294540792704\n", - "Surface training t=30337, loss=0.017099441029131413\n", - "Surface training t=30338, loss=0.016122776549309492\n", - "Surface training t=30339, loss=0.015606751665472984\n", - "Surface training t=30340, loss=0.021370931528508663\n", - "Surface training t=30341, loss=0.018419820349663496\n", - "Surface training t=30342, loss=0.017135869711637497\n", - "Surface training t=30343, loss=0.020294656045734882\n", - "Surface training t=30344, loss=0.017176250461488962\n", - "Surface training t=30345, loss=0.020970270037651062\n", - "Surface training t=30346, loss=0.016350108198821545\n", - "Surface training t=30347, loss=0.014395551290363073\n", - "Surface training t=30348, loss=0.018276960588991642\n", - "Surface training t=30349, loss=0.017672039102762938\n", - "Surface training t=30350, loss=0.019752216525375843\n", - "Surface training t=30351, loss=0.02042219042778015\n", - "Surface training t=30352, loss=0.016011111438274384\n", - "Surface training t=30353, loss=0.014998744707554579\n", - "Surface training t=30354, loss=0.01917335484176874\n", - "Surface training t=30355, loss=0.01764701446518302\n", - "Surface training t=30356, loss=0.015045455656945705\n", - "Surface training t=30357, loss=0.029754954390227795\n", - "Surface training t=30358, loss=0.021887335926294327\n", - "Surface training t=30359, loss=0.015616110526025295\n", - "Surface training t=30360, loss=0.016711031086742878\n", - "Surface training t=30361, loss=0.01583361579105258\n", - "Surface training t=30362, loss=0.021908404305577278\n", - "Surface training t=30363, loss=0.022882329300045967\n", - "Surface training t=30364, loss=0.02294286247342825\n", - "Surface training t=30365, loss=0.021125219296664\n", - "Surface training t=30366, loss=0.02768889721482992\n", - "Surface training t=30367, loss=0.01879495568573475\n", - "Surface training t=30368, loss=0.01865407219156623\n", - "Surface training t=30369, loss=0.0237520607188344\n", - "Surface training t=30370, loss=0.029049724340438843\n", - "Surface training t=30371, loss=0.03305353131145239\n", - "Surface training t=30372, loss=0.029393106698989868\n", - "Surface training t=30373, loss=0.019447889178991318\n", - "Surface training t=30374, loss=0.023014944046735764\n", - "Surface training t=30375, loss=0.02863044012337923\n", - "Surface training t=30376, loss=0.03723658435046673\n", - "Surface training t=30377, loss=0.027772845700383186\n", - "Surface training t=30378, loss=0.033472731709480286\n", - "Surface training t=30379, loss=0.03179283067584038\n", - "Surface training t=30380, loss=0.03808809444308281\n", - "Surface training t=30381, loss=0.0325018335133791\n", - "Surface training t=30382, loss=0.041165195405483246\n", - "Surface training t=30383, loss=0.039441149681806564\n", - "Surface training t=30384, loss=0.03405898064374924\n", - "Surface training t=30385, loss=0.045501573011279106\n", - "Surface training t=30386, loss=0.035266779363155365\n", - "Surface training t=30387, loss=0.028466753661632538\n", - "Surface training t=30388, loss=0.02740198839455843\n", - "Surface training t=30389, loss=0.033092984929680824\n", - "Surface training t=30390, loss=0.027453687973320484\n", - "Surface training t=30391, loss=0.035893005318939686\n", - "Surface training t=30392, loss=0.03485035616904497\n", - "Surface training t=30393, 
loss=0.044424306601285934\n", - "Surface training t=30394, loss=0.03392850048840046\n", - "Surface training t=30395, loss=0.02373263891786337\n", - "Surface training t=30396, loss=0.022137273102998734\n", - "Surface training t=30397, loss=0.022272082045674324\n", - "Surface training t=30398, loss=0.022729823365807533\n", - "Surface training t=30399, loss=0.019451047759503126\n", - "Surface training t=30400, loss=0.026139666326344013\n", - "Surface training t=30401, loss=0.02455348428338766\n", - "Surface training t=30402, loss=0.020513633266091347\n", - "Surface training t=30403, loss=0.014471313916146755\n", - "Surface training t=30404, loss=0.018001343123614788\n", - "Surface training t=30405, loss=0.018512009643018246\n", - "Surface training t=30406, loss=0.025820263661444187\n", - "Surface training t=30407, loss=0.020157049410045147\n", - "Surface training t=30408, loss=0.02314098086208105\n", - "Surface training t=30409, loss=0.021796721033751965\n", - "Surface training t=30410, loss=0.02163080219179392\n", - "Surface training t=30411, loss=0.01678782980889082\n", - "Surface training t=30412, loss=0.018573741894215345\n", - "Surface training t=30413, loss=0.019267013296484947\n", - "Surface training t=30414, loss=0.024992997758090496\n", - "Surface training t=30415, loss=0.021609937772154808\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=30416, loss=0.024737635627388954\n", - "Surface training t=30417, loss=0.020386621356010437\n", - "Surface training t=30418, loss=0.019847537390887737\n", - "Surface training t=30419, loss=0.017960297875106335\n", - "Surface training t=30420, loss=0.021155452355742455\n", - "Surface training t=30421, loss=0.01833919622004032\n", - "Surface training t=30422, loss=0.020925402641296387\n", - "Surface training t=30423, loss=0.021316079422831535\n", - "Surface training t=30424, loss=0.01604806585237384\n", - "Surface training t=30425, loss=0.017381984740495682\n", - "Surface training t=30426, loss=0.018530248198658228\n", - "Surface training t=30427, loss=0.023220956325531006\n", - "Surface training t=30428, loss=0.028316419571638107\n", - "Surface training t=30429, loss=0.02392439730465412\n", - "Surface training t=30430, loss=0.01780786830931902\n", - "Surface training t=30431, loss=0.02622645813971758\n", - "Surface training t=30432, loss=0.020184789784252644\n", - "Surface training t=30433, loss=0.02595561183989048\n", - "Surface training t=30434, loss=0.019215531647205353\n", - "Surface training t=30435, loss=0.01961960457265377\n", - "Surface training t=30436, loss=0.020292650908231735\n", - "Surface training t=30437, loss=0.023040710017085075\n", - "Surface training t=30438, loss=0.025698029436171055\n", - "Surface training t=30439, loss=0.02763746865093708\n", - "Surface training t=30440, loss=0.023235472850501537\n", - "Surface training t=30441, loss=0.04087135195732117\n", - "Surface training t=30442, loss=0.02989078313112259\n", - "Surface training t=30443, loss=0.033881233073771\n", - "Surface training t=30444, loss=0.04510360024869442\n", - "Surface training t=30445, loss=0.03763153217732906\n", - "Surface training t=30446, loss=0.035235695540905\n", - "Surface training t=30447, loss=0.03435867093503475\n", - "Surface training t=30448, loss=0.033244469203054905\n", - "Surface training t=30449, loss=0.03811822086572647\n", - "Surface training t=30450, loss=0.045479342341423035\n", - "Surface training t=30451, loss=0.03599133342504501\n", - "Surface training t=30452, loss=0.04466931335628033\n", 
- "Surface training t=30453, loss=0.03523289691656828\n", - "Surface training t=30454, loss=0.034524308517575264\n", - "Surface training t=30455, loss=0.03486512787640095\n", - "Surface training t=30456, loss=0.029109743423759937\n", - "Surface training t=30457, loss=0.023273350670933723\n", - "Surface training t=30458, loss=0.027932888828217983\n", - "Surface training t=30459, loss=0.027298112399876118\n", - "Surface training t=30460, loss=0.020396835170686245\n", - "Surface training t=30461, loss=0.02070951648056507\n", - "Surface training t=30462, loss=0.019007153809070587\n", - "Surface training t=30463, loss=0.019498310983181\n", - "Surface training t=30464, loss=0.016328041441738605\n", - "Surface training t=30465, loss=0.017097472213208675\n", - "Surface training t=30466, loss=0.020037801936268806\n", - "Surface training t=30467, loss=0.023553671315312386\n", - "Surface training t=30468, loss=0.016765085980296135\n", - "Surface training t=30469, loss=0.015388489235192537\n", - "Surface training t=30470, loss=0.01615159399807453\n", - "Surface training t=30471, loss=0.018153498880565166\n", - "Surface training t=30472, loss=0.023022697307169437\n", - "Surface training t=30473, loss=0.020226852037012577\n", - "Surface training t=30474, loss=0.019535979256033897\n", - "Surface training t=30475, loss=0.025623860768973827\n", - "Surface training t=30476, loss=0.01731927413493395\n", - "Surface training t=30477, loss=0.02644845936447382\n", - "Surface training t=30478, loss=0.02764751110225916\n", - "Surface training t=30479, loss=0.026048356667160988\n", - "Surface training t=30480, loss=0.023519201204180717\n", - "Surface training t=30481, loss=0.023733170703053474\n", - "Surface training t=30482, loss=0.016077525448054075\n", - "Surface training t=30483, loss=0.018965084105730057\n", - "Surface training t=30484, loss=0.016808882355690002\n", - "Surface training t=30485, loss=0.021113798022270203\n", - "Surface training t=30486, loss=0.02338599320501089\n", - "Surface training t=30487, loss=0.015455417335033417\n", - "Surface training t=30488, loss=0.019334642216563225\n", - "Surface training t=30489, loss=0.019305717200040817\n", - "Surface training t=30490, loss=0.021050759591162205\n", - "Surface training t=30491, loss=0.021417281590402126\n", - "Surface training t=30492, loss=0.02548833843320608\n", - "Surface training t=30493, loss=0.02299786638468504\n", - "Surface training t=30494, loss=0.01673975819721818\n", - "Surface training t=30495, loss=0.01769954664632678\n", - "Surface training t=30496, loss=0.019478258676826954\n", - "Surface training t=30497, loss=0.017933111172169447\n", - "Surface training t=30498, loss=0.020964650437235832\n", - "Surface training t=30499, loss=0.012581111397594213\n", - "Surface training t=30500, loss=0.018675796687602997\n", - "Surface training t=30501, loss=0.026184489019215107\n", - "Surface training t=30502, loss=0.02420988492667675\n", - "Surface training t=30503, loss=0.01963402796536684\n", - "Surface training t=30504, loss=0.018589258193969727\n", - "Surface training t=30505, loss=0.02121660578995943\n", - "Surface training t=30506, loss=0.017309337854385376\n", - "Surface training t=30507, loss=0.013991652056574821\n", - "Surface training t=30508, loss=0.02030839305371046\n", - "Surface training t=30509, loss=0.026818901300430298\n", - "Surface training t=30510, loss=0.024728833697736263\n", - "Surface training t=30511, loss=0.029563486576080322\n", - "Surface training t=30512, loss=0.02868972159922123\n", - "Surface training t=30513, 
loss=0.02749725431203842\n", - "Surface training t=30514, loss=0.02431714814156294\n", - "Surface training t=30515, loss=0.02669634111225605\n", - "Surface training t=30516, loss=0.01613633893430233\n", - "Surface training t=30517, loss=0.014170636422932148\n", - "Surface training t=30518, loss=0.018834155052900314\n", - "Surface training t=30519, loss=0.022858685813844204\n", - "Surface training t=30520, loss=0.018346437718719244\n", - "Surface training t=30521, loss=0.026759743690490723\n", - "Surface training t=30522, loss=0.03292881418019533\n", - "Surface training t=30523, loss=0.024595823138952255\n", - "Surface training t=30524, loss=0.0324977757409215\n", - "Surface training t=30525, loss=0.03232346195727587\n", - "Surface training t=30526, loss=0.02275900077074766\n", - "Surface training t=30527, loss=0.02008875086903572\n", - "Surface training t=30528, loss=0.02594356145709753\n", - "Surface training t=30529, loss=0.021043360233306885\n", - "Surface training t=30530, loss=0.020611178129911423\n", - "Surface training t=30531, loss=0.02410210855305195\n", - "Surface training t=30532, loss=0.021246613934636116\n", - "Surface training t=30533, loss=0.025120475329458714\n", - "Surface training t=30534, loss=0.01797364093363285\n", - "Surface training t=30535, loss=0.02280845958739519\n", - "Surface training t=30536, loss=0.022264896892011166\n", - "Surface training t=30537, loss=0.02167173381894827\n", - "Surface training t=30538, loss=0.023530724458396435\n", - "Surface training t=30539, loss=0.025690333917737007\n", - "Surface training t=30540, loss=0.01688190270215273\n", - "Surface training t=30541, loss=0.017317157238721848\n", - "Surface training t=30542, loss=0.014639560598880053\n", - "Surface training t=30543, loss=0.01774778589606285\n", - "Surface training t=30544, loss=0.01964077167212963\n", - "Surface training t=30545, loss=0.017807023599743843\n", - "Surface training t=30546, loss=0.017487955279648304\n", - "Surface training t=30547, loss=0.015027317218482494\n", - "Surface training t=30548, loss=0.018069779500365257\n", - "Surface training t=30549, loss=0.01476193219423294\n", - "Surface training t=30550, loss=0.022547096945345402\n", - "Surface training t=30551, loss=0.019233337603509426\n", - "Surface training t=30552, loss=0.014497108291834593\n", - "Surface training t=30553, loss=0.01833906304091215\n", - "Surface training t=30554, loss=0.014423145912587643\n", - "Surface training t=30555, loss=0.017537535168230534\n", - "Surface training t=30556, loss=0.012981365900486708\n", - "Surface training t=30557, loss=0.018926560878753662\n", - "Surface training t=30558, loss=0.01916605420410633\n", - "Surface training t=30559, loss=0.017635755240917206\n", - "Surface training t=30560, loss=0.02197412308305502\n", - "Surface training t=30561, loss=0.022463388741016388\n", - "Surface training t=30562, loss=0.016457892023026943\n", - "Surface training t=30563, loss=0.024166353046894073\n", - "Surface training t=30564, loss=0.023484227247536182\n", - "Surface training t=30565, loss=0.021873525343835354\n", - "Surface training t=30566, loss=0.020223813131451607\n", - "Surface training t=30567, loss=0.01646760944277048\n", - "Surface training t=30568, loss=0.01997016929090023\n", - "Surface training t=30569, loss=0.023825600743293762\n", - "Surface training t=30570, loss=0.04392109252512455\n", - "Surface training t=30571, loss=0.031810885295271873\n", - "Surface training t=30572, loss=0.035784751176834106\n", - "Surface training t=30573, loss=0.04953017644584179\n", - "Surface 
training t=30574, loss=0.03280220925807953\n", - "Surface training t=30575, loss=0.047600919380784035\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=30576, loss=0.04137565474957228\n", - "Surface training t=30577, loss=0.028518376871943474\n", - "Surface training t=30578, loss=0.04212718829512596\n", - "Surface training t=30579, loss=0.034916749224066734\n", - "Surface training t=30580, loss=0.03310711123049259\n", - "Surface training t=30581, loss=0.03320298343896866\n", - "Surface training t=30582, loss=0.03506818413734436\n", - "Surface training t=30583, loss=0.028503963723778725\n", - "Surface training t=30584, loss=0.025666335597634315\n", - "Surface training t=30585, loss=0.026028256863355637\n", - "Surface training t=30586, loss=0.031375402584671974\n", - "Surface training t=30587, loss=0.029314936138689518\n", - "Surface training t=30588, loss=0.02390549797564745\n", - "Surface training t=30589, loss=0.028352237306535244\n", - "Surface training t=30590, loss=0.027437439188361168\n", - "Surface training t=30591, loss=0.029373208060860634\n", - "Surface training t=30592, loss=0.018964664079248905\n", - "Surface training t=30593, loss=0.029299275018274784\n", - "Surface training t=30594, loss=0.026617701165378094\n", - "Surface training t=30595, loss=0.023650435730814934\n", - "Surface training t=30596, loss=0.0378093458712101\n", - "Surface training t=30597, loss=0.02142405789345503\n", - "Surface training t=30598, loss=0.037339530885219574\n", - "Surface training t=30599, loss=0.022802975960075855\n", - "Surface training t=30600, loss=0.02749539166688919\n", - "Surface training t=30601, loss=0.028043298050761223\n", - "Surface training t=30602, loss=0.020216525997966528\n", - "Surface training t=30603, loss=0.022359076887369156\n", - "Surface training t=30604, loss=0.02400452457368374\n", - "Surface training t=30605, loss=0.01690005511045456\n", - "Surface training t=30606, loss=0.020345482509583235\n", - "Surface training t=30607, loss=0.019705379381775856\n", - "Surface training t=30608, loss=0.026350123807787895\n", - "Surface training t=30609, loss=0.025834323838353157\n", - "Surface training t=30610, loss=0.019386257976293564\n", - "Surface training t=30611, loss=0.033303163945674896\n", - "Surface training t=30612, loss=0.02399855013936758\n", - "Surface training t=30613, loss=0.022622719407081604\n", - "Surface training t=30614, loss=0.023023373447358608\n", - "Surface training t=30615, loss=0.018259103409945965\n", - "Surface training t=30616, loss=0.023933332413434982\n", - "Surface training t=30617, loss=0.023835711181163788\n", - "Surface training t=30618, loss=0.017450283747166395\n", - "Surface training t=30619, loss=0.023859770968556404\n", - "Surface training t=30620, loss=0.028325723484158516\n", - "Surface training t=30621, loss=0.020053766667842865\n", - "Surface training t=30622, loss=0.019750433042645454\n", - "Surface training t=30623, loss=0.01810718048363924\n", - "Surface training t=30624, loss=0.01783764362335205\n", - "Surface training t=30625, loss=0.02057238481938839\n", - "Surface training t=30626, loss=0.018188506364822388\n", - "Surface training t=30627, loss=0.015942785888910294\n", - "Surface training t=30628, loss=0.015775613486766815\n", - "Surface training t=30629, loss=0.013515650294721127\n", - "Surface training t=30630, loss=0.018944703973829746\n", - "Surface training t=30631, loss=0.01778400456532836\n", - "Surface training t=30632, loss=0.02133302576839924\n", - "Surface training t=30633, 
loss=0.01687951758503914\n", - "Surface training t=30634, loss=0.018380057066679\n", - "Surface training t=30635, loss=0.01770534086972475\n", - "Surface training t=30636, loss=0.02100516203790903\n", - "Surface training t=30637, loss=0.023858544416725636\n", - "Surface training t=30638, loss=0.021896008402109146\n", - "Surface training t=30639, loss=0.02179577760398388\n", - "Surface training t=30640, loss=0.023882689885795116\n", - "Surface training t=30641, loss=0.030803125351667404\n", - "Surface training t=30642, loss=0.03315687458962202\n", - "Surface training t=30643, loss=0.028795775026082993\n", - "Surface training t=30644, loss=0.024631555192172527\n", - "Surface training t=30645, loss=0.024311221204698086\n", - "Surface training t=30646, loss=0.027559563517570496\n", - "Surface training t=30647, loss=0.02143586240708828\n", - "Surface training t=30648, loss=0.024630757048726082\n", - "Surface training t=30649, loss=0.022555391304194927\n", - "Surface training t=30650, loss=0.02157822996377945\n", - "Surface training t=30651, loss=0.02822139672935009\n", - "Surface training t=30652, loss=0.02969115786254406\n", - "Surface training t=30653, loss=0.024477345868945122\n", - "Surface training t=30654, loss=0.023532788269221783\n", - "Surface training t=30655, loss=0.021311523392796516\n", - "Surface training t=30656, loss=0.028848969377577305\n", - "Surface training t=30657, loss=0.021409199573099613\n", - "Surface training t=30658, loss=0.018951947800815105\n", - "Surface training t=30659, loss=0.02254068572074175\n", - "Surface training t=30660, loss=0.01643639337271452\n", - "Surface training t=30661, loss=0.015871526673436165\n", - "Surface training t=30662, loss=0.018341180868446827\n", - "Surface training t=30663, loss=0.015469701960682869\n", - "Surface training t=30664, loss=0.017045216634869576\n", - "Surface training t=30665, loss=0.014416616410017014\n", - "Surface training t=30666, loss=0.02078006137162447\n", - "Surface training t=30667, loss=0.01777272205799818\n", - "Surface training t=30668, loss=0.017429118044674397\n", - "Surface training t=30669, loss=0.014409711118787527\n", - "Surface training t=30670, loss=0.0168314753100276\n", - "Surface training t=30671, loss=0.015039064455777407\n", - "Surface training t=30672, loss=0.01605619490146637\n", - "Surface training t=30673, loss=0.01954176276922226\n", - "Surface training t=30674, loss=0.019101842306554317\n", - "Surface training t=30675, loss=0.016898096073418856\n", - "Surface training t=30676, loss=0.020283610559999943\n", - "Surface training t=30677, loss=0.013591774739325047\n", - "Surface training t=30678, loss=0.013689756393432617\n", - "Surface training t=30679, loss=0.016232993453741074\n", - "Surface training t=30680, loss=0.015376986935734749\n", - "Surface training t=30681, loss=0.018204539082944393\n", - "Surface training t=30682, loss=0.01672386797145009\n", - "Surface training t=30683, loss=0.01559029845520854\n", - "Surface training t=30684, loss=0.01629507727921009\n", - "Surface training t=30685, loss=0.02120337914675474\n", - "Surface training t=30686, loss=0.023559930734336376\n", - "Surface training t=30687, loss=0.02187881339341402\n", - "Surface training t=30688, loss=0.020329533610492945\n", - "Surface training t=30689, loss=0.018855771981179714\n", - "Surface training t=30690, loss=0.018687570467591286\n", - "Surface training t=30691, loss=0.01667479146271944\n", - "Surface training t=30692, loss=0.01931939832866192\n", - "Surface training t=30693, loss=0.019668705761432648\n", - "Surface 
training t=30694, loss=0.02146190172061324\n", - "Surface training t=30695, loss=0.01914101280272007\n", - "Surface training t=30696, loss=0.015028176363557577\n", - "Surface training t=30697, loss=0.014962790999561548\n", - "Surface training t=30698, loss=0.015066292136907578\n", - "Surface training t=30699, loss=0.020940730813890696\n", - "Surface training t=30700, loss=0.018807812593877316\n", - "Surface training t=30701, loss=0.015472308732569218\n", - "Surface training t=30702, loss=0.022288269363343716\n", - "Surface training t=30703, loss=0.019436330534517765\n", - "Surface training t=30704, loss=0.016380468383431435\n", - "Surface training t=30705, loss=0.01696903631091118\n", - "Surface training t=30706, loss=0.017413124442100525\n", - "Surface training t=30707, loss=0.02087066601961851\n", - "Surface training t=30708, loss=0.018038753420114517\n", - "Surface training t=30709, loss=0.01592828892171383\n", - "Surface training t=30710, loss=0.018449645955115557\n", - "Surface training t=30711, loss=0.015108661726117134\n", - "Surface training t=30712, loss=0.020707485266029835\n", - "Surface training t=30713, loss=0.020454774610698223\n", - "Surface training t=30714, loss=0.019247029908001423\n", - "Surface training t=30715, loss=0.023490614257752895\n", - "Surface training t=30716, loss=0.023550412617623806\n", - "Surface training t=30717, loss=0.02110708551481366\n", - "Surface training t=30718, loss=0.02418868988752365\n", - "Surface training t=30719, loss=0.023113254457712173\n", - "Surface training t=30720, loss=0.019302714616060257\n", - "Surface training t=30721, loss=0.020672340877354145\n", - "Surface training t=30722, loss=0.022239629179239273\n", - "Surface training t=30723, loss=0.01864449493587017\n", - "Surface training t=30724, loss=0.022415971383452415\n", - "Surface training t=30725, loss=0.020731224678456783\n", - "Surface training t=30726, loss=0.01700976211577654\n", - "Surface training t=30727, loss=0.020064614713191986\n", - "Surface training t=30728, loss=0.01832054927945137\n", - "Surface training t=30729, loss=0.018034445121884346\n", - "Surface training t=30730, loss=0.015852182172238827\n", - "Surface training t=30731, loss=0.026176284067332745\n", - "Surface training t=30732, loss=0.019539727829396725\n", - "Surface training t=30733, loss=0.018138673156499863\n", - "Surface training t=30734, loss=0.021168198436498642\n", - "Surface training t=30735, loss=0.017996075563132763\n", - "Surface training t=30736, loss=0.01673252461478114\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=30737, loss=0.013125296216458082\n", - "Surface training t=30738, loss=0.01781069952994585\n", - "Surface training t=30739, loss=0.01965885329991579\n", - "Surface training t=30740, loss=0.02197952289134264\n", - "Surface training t=30741, loss=0.020777767524123192\n", - "Surface training t=30742, loss=0.02307339757680893\n", - "Surface training t=30743, loss=0.020579921081662178\n", - "Surface training t=30744, loss=0.015565100125968456\n", - "Surface training t=30745, loss=0.014686009380966425\n", - "Surface training t=30746, loss=0.017404763959348202\n", - "Surface training t=30747, loss=0.018704101908951998\n", - "Surface training t=30748, loss=0.019524057395756245\n", - "Surface training t=30749, loss=0.01487384457141161\n", - "Surface training t=30750, loss=0.016180651262402534\n", - "Surface training t=30751, loss=0.013479314278811216\n", - "Surface training t=30752, loss=0.01997518725693226\n", - "Surface training t=30753, 
loss=0.018605953082442284\n", - "Surface training t=30754, loss=0.025010778568685055\n", - "Surface training t=30755, loss=0.024744135327637196\n", - "Surface training t=30756, loss=0.02880672924220562\n", - "Surface training t=30757, loss=0.02906864508986473\n", - "Surface training t=30758, loss=0.032422468066215515\n", - "Surface training t=30759, loss=0.020230038091540337\n", - "Surface training t=30760, loss=0.02950431313365698\n", - "Surface training t=30761, loss=0.021607653237879276\n", - "Surface training t=30762, loss=0.01747121149674058\n", - "Surface training t=30763, loss=0.02573563065379858\n", - "Surface training t=30764, loss=0.022110690362751484\n", - "Surface training t=30765, loss=0.01861858880147338\n", - "Surface training t=30766, loss=0.022342287003993988\n", - "Surface training t=30767, loss=0.025539526715874672\n", - "Surface training t=30768, loss=0.022268389351665974\n", - "Surface training t=30769, loss=0.024488944560289383\n", - "Surface training t=30770, loss=0.021054468117654324\n", - "Surface training t=30771, loss=0.023669585585594177\n", - "Surface training t=30772, loss=0.02162361890077591\n", - "Surface training t=30773, loss=0.021504126489162445\n", - "Surface training t=30774, loss=0.021165631711483\n", - "Surface training t=30775, loss=0.013458941597491503\n", - "Surface training t=30776, loss=0.02191029954701662\n", - "Surface training t=30777, loss=0.01899200864136219\n", - "Surface training t=30778, loss=0.02665200922638178\n", - "Surface training t=30779, loss=0.022206529043614864\n", - "Surface training t=30780, loss=0.017873436212539673\n", - "Surface training t=30781, loss=0.02577979303896427\n", - "Surface training t=30782, loss=0.018717357888817787\n", - "Surface training t=30783, loss=0.028974590823054314\n", - "Surface training t=30784, loss=0.02667070832103491\n", - "Surface training t=30785, loss=0.023196294903755188\n", - "Surface training t=30786, loss=0.021854261867702007\n", - "Surface training t=30787, loss=0.025726059451699257\n", - "Surface training t=30788, loss=0.02738122083246708\n", - "Surface training t=30789, loss=0.034002939239144325\n", - "Surface training t=30790, loss=0.027227938175201416\n", - "Surface training t=30791, loss=0.023363211192190647\n", - "Surface training t=30792, loss=0.026771003380417824\n", - "Surface training t=30793, loss=0.029815979301929474\n", - "Surface training t=30794, loss=0.035365333780646324\n", - "Surface training t=30795, loss=0.026186014525592327\n", - "Surface training t=30796, loss=0.035637445747852325\n", - "Surface training t=30797, loss=0.03240780532360077\n", - "Surface training t=30798, loss=0.0410111416131258\n", - "Surface training t=30799, loss=0.029243584722280502\n", - "Surface training t=30800, loss=0.02623093221336603\n", - "Surface training t=30801, loss=0.03360269032418728\n", - "Surface training t=30802, loss=0.025829712860286236\n", - "Surface training t=30803, loss=0.023407457396388054\n", - "Surface training t=30804, loss=0.02338547632098198\n", - "Surface training t=30805, loss=0.024569100700318813\n", - "Surface training t=30806, loss=0.024327071849256754\n", - "Surface training t=30807, loss=0.02983145322650671\n", - "Surface training t=30808, loss=0.024181769229471684\n", - "Surface training t=30809, loss=0.02227955963462591\n", - "Surface training t=30810, loss=0.019163536839187145\n", - "Surface training t=30811, loss=0.015098688192665577\n", - "Surface training t=30812, loss=0.016502326354384422\n", - "Surface training t=30813, loss=0.015512467361986637\n", - 
"Surface training t=30814, loss=0.014463773928582668\n", - "Surface training t=30815, loss=0.013880625832825899\n", - "Surface training t=30816, loss=0.016512103378772736\n", - "Surface training t=30817, loss=0.018687967211008072\n", - "Surface training t=30818, loss=0.019375205971300602\n", - "Surface training t=30819, loss=0.02227914985269308\n", - "Surface training t=30820, loss=0.015592919662594795\n", - "Surface training t=30821, loss=0.01374881248921156\n", - "Surface training t=30822, loss=0.019704723730683327\n", - "Surface training t=30823, loss=0.02387682255357504\n", - "Surface training t=30824, loss=0.018586951307952404\n", - "Surface training t=30825, loss=0.027766231447458267\n", - "Surface training t=30826, loss=0.02731917053461075\n", - "Surface training t=30827, loss=0.0220790421590209\n", - "Surface training t=30828, loss=0.018243389204144478\n", - "Surface training t=30829, loss=0.01949572190642357\n", - "Surface training t=30830, loss=0.01761636510491371\n", - "Surface training t=30831, loss=0.02119380608201027\n", - "Surface training t=30832, loss=0.022258838638663292\n", - "Surface training t=30833, loss=0.018783987499773502\n", - "Surface training t=30834, loss=0.020214281976222992\n", - "Surface training t=30835, loss=0.01803181041032076\n", - "Surface training t=30836, loss=0.026370616629719734\n", - "Surface training t=30837, loss=0.027431846596300602\n", - "Surface training t=30838, loss=0.01898652408272028\n", - "Surface training t=30839, loss=0.02226497232913971\n", - "Surface training t=30840, loss=0.02229951787739992\n", - "Surface training t=30841, loss=0.021350139752030373\n", - "Surface training t=30842, loss=0.031020919792354107\n", - "Surface training t=30843, loss=0.0315221231430769\n", - "Surface training t=30844, loss=0.02932106051594019\n", - "Surface training t=30845, loss=0.023981391452252865\n", - "Surface training t=30846, loss=0.03161788359284401\n", - "Surface training t=30847, loss=0.023105645552277565\n", - "Surface training t=30848, loss=0.028568396344780922\n", - "Surface training t=30849, loss=0.026794892735779285\n", - "Surface training t=30850, loss=0.022686856798827648\n", - "Surface training t=30851, loss=0.019068048801273108\n", - "Surface training t=30852, loss=0.01817223895341158\n", - "Surface training t=30853, loss=0.02188391052186489\n", - "Surface training t=30854, loss=0.024184051901102066\n", - "Surface training t=30855, loss=0.024561375379562378\n", - "Surface training t=30856, loss=0.0243116095662117\n", - "Surface training t=30857, loss=0.02541803102940321\n", - "Surface training t=30858, loss=0.027682330459356308\n", - "Surface training t=30859, loss=0.0227094367146492\n", - "Surface training t=30860, loss=0.029706399887800217\n", - "Surface training t=30861, loss=0.022352258674800396\n", - "Surface training t=30862, loss=0.02352918405085802\n", - "Surface training t=30863, loss=0.029915506951510906\n", - "Surface training t=30864, loss=0.02646341361105442\n", - "Surface training t=30865, loss=0.019958623684942722\n", - "Surface training t=30866, loss=0.019989054650068283\n", - "Surface training t=30867, loss=0.019956338219344616\n", - "Surface training t=30868, loss=0.018857352435588837\n", - "Surface training t=30869, loss=0.01528285164386034\n", - "Surface training t=30870, loss=0.01860093232244253\n", - "Surface training t=30871, loss=0.02080896496772766\n", - "Surface training t=30872, loss=0.017420226708054543\n", - "Surface training t=30873, loss=0.018046105280518532\n", - "Surface training t=30874, 
loss=0.019015257246792316\n", - "Surface training t=30875, loss=0.01877467054873705\n", - "Surface training t=30876, loss=0.02130650170147419\n", - "Surface training t=30877, loss=0.022657714784145355\n", - "Surface training t=30878, loss=0.023233805783092976\n", - "Surface training t=30879, loss=0.030612265691161156\n", - "Surface training t=30880, loss=0.02090730145573616\n", - "Surface training t=30881, loss=0.02911736350506544\n", - "Surface training t=30882, loss=0.019671724177896976\n", - "Surface training t=30883, loss=0.024170946329832077\n", - "Surface training t=30884, loss=0.016475358977913857\n", - "Surface training t=30885, loss=0.01905378047376871\n", - "Surface training t=30886, loss=0.020240182988345623\n", - "Surface training t=30887, loss=0.019570395350456238\n", - "Surface training t=30888, loss=0.01601585652679205\n", - "Surface training t=30889, loss=0.015471785329282284\n", - "Surface training t=30890, loss=0.01609754841774702\n", - "Surface training t=30891, loss=0.014005512464791536\n", - "Surface training t=30892, loss=0.012359629850834608\n", - "Surface training t=30893, loss=0.015824039466679096\n", - "Surface training t=30894, loss=0.01726577151566744\n", - "Surface training t=30895, loss=0.021512148901820183\n", - "Surface training t=30896, loss=0.02467342745512724\n", - "Surface training t=30897, loss=0.025814545340836048\n", - "Surface training t=30898, loss=0.02878912352025509\n", - "Surface training t=30899, loss=0.0266472976654768\n", - "Surface training t=30900, loss=0.03776436299085617\n", - "Surface training t=30901, loss=0.029542719945311546\n", - "Surface training t=30902, loss=0.04443454183638096\n", - "Surface training t=30903, loss=0.02453674655407667\n", - "Surface training t=30904, loss=0.029457329772412777\n", - "Surface training t=30905, loss=0.02815253846347332\n", - "Surface training t=30906, loss=0.02623734436929226\n", - "Surface training t=30907, loss=0.034443242475390434\n", - "Surface training t=30908, loss=0.028003198094666004\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=30909, loss=0.028077365830540657\n", - "Surface training t=30910, loss=0.024272669106721878\n", - "Surface training t=30911, loss=0.03179974853992462\n", - "Surface training t=30912, loss=0.02376769669353962\n", - "Surface training t=30913, loss=0.01769090536981821\n", - "Surface training t=30914, loss=0.018193818628787994\n", - "Surface training t=30915, loss=0.02025389578193426\n", - "Surface training t=30916, loss=0.02130327746272087\n", - "Surface training t=30917, loss=0.022393220104277134\n", - "Surface training t=30918, loss=0.017398124560713768\n", - "Surface training t=30919, loss=0.019123036414384842\n", - "Surface training t=30920, loss=0.0172822424210608\n", - "Surface training t=30921, loss=0.021231520920991898\n", - "Surface training t=30922, loss=0.019781234674155712\n", - "Surface training t=30923, loss=0.020351174287497997\n", - "Surface training t=30924, loss=0.014669748954474926\n", - "Surface training t=30925, loss=0.016189510002732277\n", - "Surface training t=30926, loss=0.017223238945007324\n", - "Surface training t=30927, loss=0.014447185210883617\n", - "Surface training t=30928, loss=0.016017980873584747\n", - "Surface training t=30929, loss=0.019062248058617115\n", - "Surface training t=30930, loss=0.01746402308344841\n", - "Surface training t=30931, loss=0.017800084315240383\n", - "Surface training t=30932, loss=0.015481628943234682\n", - "Surface training t=30933, 
loss=0.018137947656214237\n", - "Surface training t=30934, loss=0.024591870605945587\n", - "Surface training t=30935, loss=0.01768628228455782\n", - "Surface training t=30936, loss=0.020778309553861618\n", - "Surface training t=30937, loss=0.027078957296907902\n", - "Surface training t=30938, loss=0.025300106033682823\n", - "Surface training t=30939, loss=0.020607872866094112\n", - "Surface training t=30940, loss=0.03694678656756878\n", - "Surface training t=30941, loss=0.024529782123863697\n", - "Surface training t=30942, loss=0.03494008257985115\n", - "Surface training t=30943, loss=0.023868978023529053\n", - "Surface training t=30944, loss=0.026098966132849455\n", - "Surface training t=30945, loss=0.048745639622211456\n", - "Surface training t=30946, loss=0.03630760312080383\n", - "Surface training t=30947, loss=0.027730916626751423\n", - "Surface training t=30948, loss=0.05387418158352375\n", - "Surface training t=30949, loss=0.03213398531079292\n", - "Surface training t=30950, loss=0.037733133882284164\n", - "Surface training t=30951, loss=0.03565196879208088\n", - "Surface training t=30952, loss=0.029742212034761906\n", - "Surface training t=30953, loss=0.03660757839679718\n", - "Surface training t=30954, loss=0.03049459680914879\n", - "Surface training t=30955, loss=0.030100581236183643\n", - "Surface training t=30956, loss=0.026863140054047108\n", - "Surface training t=30957, loss=0.026667636819183826\n", - "Surface training t=30958, loss=0.027028869837522507\n", - "Surface training t=30959, loss=0.02784813567996025\n", - "Surface training t=30960, loss=0.02644863072782755\n", - "Surface training t=30961, loss=0.021351893432438374\n", - "Surface training t=30962, loss=0.019245279021561146\n", - "Surface training t=30963, loss=0.016852005384862423\n", - "Surface training t=30964, loss=0.01761044468730688\n", - "Surface training t=30965, loss=0.01798113528639078\n", - "Surface training t=30966, loss=0.0230823066085577\n", - "Surface training t=30967, loss=0.025647557340562344\n", - "Surface training t=30968, loss=0.02329183742403984\n", - "Surface training t=30969, loss=0.02689053677022457\n", - "Surface training t=30970, loss=0.029048506170511246\n", - "Surface training t=30971, loss=0.02520553022623062\n", - "Surface training t=30972, loss=0.025722422637045383\n", - "Surface training t=30973, loss=0.026408463716506958\n", - "Surface training t=30974, loss=0.03558485209941864\n", - "Surface training t=30975, loss=0.03089703619480133\n", - "Surface training t=30976, loss=0.028457660228013992\n", - "Surface training t=30977, loss=0.03301105089485645\n", - "Surface training t=30978, loss=0.02523920312523842\n", - "Surface training t=30979, loss=0.0236098850145936\n", - "Surface training t=30980, loss=0.02485487051308155\n", - "Surface training t=30981, loss=0.02250255737453699\n", - "Surface training t=30982, loss=0.019029458053410053\n", - "Surface training t=30983, loss=0.022178327664732933\n", - "Surface training t=30984, loss=0.017912553623318672\n", - "Surface training t=30985, loss=0.021222950890660286\n", - "Surface training t=30986, loss=0.02610298153012991\n", - "Surface training t=30987, loss=0.021767484489828348\n", - "Surface training t=30988, loss=0.03321005217730999\n", - "Surface training t=30989, loss=0.022938740439713\n", - "Surface training t=30990, loss=0.0240258714184165\n", - "Surface training t=30991, loss=0.0289052901789546\n", - "Surface training t=30992, loss=0.018610458821058273\n", - "Surface training t=30993, loss=0.021977203898131847\n", - "Surface training 
t=30994, loss=0.020711303688585758\n", - "Surface training t=30995, loss=0.022785566747188568\n", - "Surface training t=30996, loss=0.018467310816049576\n", - "Surface training t=30997, loss=0.017401807941496372\n", - "Surface training t=30998, loss=0.019616620615124702\n", - "Surface training t=30999, loss=0.019204518757760525\n", - "Surface training t=31000, loss=0.019811619073152542\n", - "Surface training t=31001, loss=0.02556951530277729\n", - "Surface training t=31002, loss=0.020460618659853935\n", - "Surface training t=31003, loss=0.022358646616339684\n", - "Surface training t=31004, loss=0.028320465236902237\n", - "Surface training t=31005, loss=0.02071969583630562\n", - "Surface training t=31006, loss=0.022763632237911224\n", - "Surface training t=31007, loss=0.020372424274683\n", - "Surface training t=31008, loss=0.025409973226487637\n", - "Surface training t=31009, loss=0.023668045178055763\n", - "Surface training t=31010, loss=0.03170101251453161\n", - "Surface training t=31011, loss=0.022899296134710312\n", - "Surface training t=31012, loss=0.025456794537603855\n", - "Surface training t=31013, loss=0.025671658106148243\n", - "Surface training t=31014, loss=0.020453290082514286\n", - "Surface training t=31015, loss=0.02065649814903736\n", - "Surface training t=31016, loss=0.016090385615825653\n", - "Surface training t=31017, loss=0.020154946483671665\n", - "Surface training t=31018, loss=0.015864025335758924\n", - "Surface training t=31019, loss=0.01771001610904932\n", - "Surface training t=31020, loss=0.021515926346182823\n", - "Surface training t=31021, loss=0.023560301400721073\n", - "Surface training t=31022, loss=0.01964735798537731\n", - "Surface training t=31023, loss=0.02589493151754141\n", - "Surface training t=31024, loss=0.02175731025636196\n", - "Surface training t=31025, loss=0.022722672671079636\n", - "Surface training t=31026, loss=0.022190591786056757\n", - "Surface training t=31027, loss=0.021762527525424957\n", - "Surface training t=31028, loss=0.02824784442782402\n", - "Surface training t=31029, loss=0.0262697571888566\n", - "Surface training t=31030, loss=0.022571607492864132\n", - "Surface training t=31031, loss=0.019349422305822372\n", - "Surface training t=31032, loss=0.02234881930053234\n", - "Surface training t=31033, loss=0.019784159027040005\n", - "Surface training t=31034, loss=0.019587356597185135\n", - "Surface training t=31035, loss=0.020676455460488796\n", - "Surface training t=31036, loss=0.02028217539191246\n", - "Surface training t=31037, loss=0.020596111193299294\n", - "Surface training t=31038, loss=0.020807961001992226\n", - "Surface training t=31039, loss=0.024005405604839325\n", - "Surface training t=31040, loss=0.023144403472542763\n", - "Surface training t=31041, loss=0.023539898917078972\n", - "Surface training t=31042, loss=0.017985929735004902\n", - "Surface training t=31043, loss=0.022717738524079323\n", - "Surface training t=31044, loss=0.02929131593555212\n", - "Surface training t=31045, loss=0.031141839921474457\n", - "Surface training t=31046, loss=0.02761219535022974\n", - "Surface training t=31047, loss=0.029102216474711895\n", - "Surface training t=31048, loss=0.04869444668292999\n", - "Surface training t=31049, loss=0.027459953911602497\n", - "Surface training t=31050, loss=0.029733208008110523\n", - "Surface training t=31051, loss=0.05715685337781906\n", - "Surface training t=31052, loss=0.034520335495471954\n", - "Surface training t=31053, loss=0.03778074588626623\n", - "Surface training t=31054, 
loss=0.02507773693650961\n", - "Surface training t=31055, loss=0.023427413776516914\n", - "Surface training t=31056, loss=0.02497377060353756\n", - "Surface training t=31057, loss=0.02869915682822466\n", - "Surface training t=31058, loss=0.027871381491422653\n", - "Surface training t=31059, loss=0.037260839715600014\n", - "Surface training t=31060, loss=0.025497371330857277\n", - "Surface training t=31061, loss=0.022888343781232834\n", - "Surface training t=31062, loss=0.02046285942196846\n", - "Surface training t=31063, loss=0.018293297849595547\n", - "Surface training t=31064, loss=0.024722625501453876\n", - "Surface training t=31065, loss=0.025639524683356285\n", - "Surface training t=31066, loss=0.026434236206114292\n", - "Surface training t=31067, loss=0.023108912631869316\n", - "Surface training t=31068, loss=0.023828454315662384\n", - "Surface training t=31069, loss=0.028068164363503456\n", - "Surface training t=31070, loss=0.030044580809772015\n", - "Surface training t=31071, loss=0.04427112266421318\n", - "Surface training t=31072, loss=0.031697848811745644\n", - "Surface training t=31073, loss=0.04473084211349487\n", - "Surface training t=31074, loss=0.047311995178461075\n", - "Surface training t=31075, loss=0.03307529725134373\n", - "Surface training t=31076, loss=0.02815055660903454\n", - "Surface training t=31077, loss=0.02512978669255972\n", - "Surface training t=31078, loss=0.0176415485329926\n", - "Surface training t=31079, loss=0.02315274439752102\n", - "Surface training t=31080, loss=0.02824767306447029\n", - "Surface training t=31081, loss=0.02392004895955324\n", - "Surface training t=31082, loss=0.023432815447449684\n", - "Surface training t=31083, loss=0.025105107575654984\n", - "Surface training t=31084, loss=0.025284831412136555\n", - "Surface training t=31085, loss=0.03308192174881697\n", - "Surface training t=31086, loss=0.025357069447636604\n", - "Surface training t=31087, loss=0.031169716268777847\n", - "Surface training t=31088, loss=0.03378382883965969\n", - "Surface training t=31089, loss=0.02773091569542885\n", - "Surface training t=31090, loss=0.029849437065422535\n", - "Surface training t=31091, loss=0.022697472013533115\n", - "Surface training t=31092, loss=0.035506078973412514\n", - "Surface training t=31093, loss=0.026789920404553413\n", - "Surface training t=31094, loss=0.03538598492741585\n", - "Surface training t=31095, loss=0.023504395969212055\n", - "Surface training t=31096, loss=0.022009778767824173\n", - "Surface training t=31097, loss=0.017276298254728317\n", - "Surface training t=31098, loss=0.021733173169195652\n", - "Surface training t=31099, loss=0.016176776960492134\n", - "Surface training t=31100, loss=0.022259457036852837\n", - "Surface training t=31101, loss=0.02731277421116829\n", - "Surface training t=31102, loss=0.021559261716902256\n", - "Surface training t=31103, loss=0.022247301414608955\n", - "Surface training t=31104, loss=0.023073342628777027\n", - "Surface training t=31105, loss=0.0176779143512249\n", - "Surface training t=31106, loss=0.0179916275665164\n", - "Surface training t=31107, loss=0.018177254125475883\n", - "Surface training t=31108, loss=0.018801226280629635\n", - "Surface training t=31109, loss=0.022633477114140987\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=31110, loss=0.018861854448914528\n", - "Surface training t=31111, loss=0.017792578786611557\n", - "Surface training t=31112, loss=0.01645312737673521\n", - "Surface training t=31113, 
loss=0.026909103617072105\n", - "Surface training t=32317, loss=0.02333619724959135\n", - "Surface training t=32318, loss=0.022971656173467636\n", - "Surface training t=32319, loss=0.030626585707068443\n", - "Surface training t=32320, loss=0.026098208501935005\n", - "Surface training t=32321, loss=0.03081005346029997\n", - "Surface training t=32322, loss=0.03302090801298618\n", - "Surface training t=32323, loss=0.030059575103223324\n", - "Surface training t=32324, loss=0.02442788053303957\n", - "Surface training t=32325, loss=0.02645008359104395\n", - "Surface training t=32326, loss=0.0327809127047658\n", - "Surface training t=32327, loss=0.03705624956637621\n", - "Surface training t=32328, loss=0.04220941849052906\n", - "Surface training t=32329, loss=0.040290530771017075\n", - "Surface training t=32330, loss=0.026467307470738888\n", - "Surface training t=32331, loss=0.025935925543308258\n", - "Surface training t=32332, loss=0.02439158782362938\n", - "Surface training t=32333, loss=0.0273444764316082\n", - "Surface training t=32334, loss=0.0260746655985713\n", - "Surface training t=32335, loss=0.023789330385625362\n", - "Surface training t=32336, loss=0.02862053830176592\n", - "Surface training t=32337, loss=0.026677658781409264\n", - "Surface training t=32338, loss=0.02398320473730564\n", - "Surface training t=32339, loss=0.022172892466187477\n", - "Surface training t=32340, loss=0.024334198795259\n", - "Surface training t=32341, loss=0.030229585245251656\n", - "Surface training t=32342, loss=0.031721118837594986\n", - "Surface training t=32343, loss=0.02424686960875988\n", - "Surface training t=32344, loss=0.023614694364368916\n", - "Surface training t=32345, loss=0.020638979971408844\n", - "Surface training t=32346, loss=0.023386464454233646\n", - "Surface training t=32347, loss=0.022312330082058907\n", - "Surface training t=32348, loss=0.017202730756253004\n", - "Surface training t=32349, loss=0.02246508002281189\n", - "Surface training t=32350, loss=0.02044464461505413\n", - "Surface training t=32351, loss=0.015208699740469456\n", - "Surface training t=32352, loss=0.01802571676671505\n", - "Surface training t=32353, loss=0.017073050141334534\n", - "Surface training t=32354, loss=0.016244901809841394\n", - "Surface training t=32355, loss=0.021857883781194687\n", - "Surface training t=32356, loss=0.019822814501821995\n", - "Surface training t=32357, loss=0.023487890139222145\n", - "Surface training t=32358, loss=0.017677444498986006\n", - "Surface training t=32359, loss=0.013494379352778196\n", - "Surface training t=32360, loss=0.02209279965609312\n", - "Surface training t=32361, loss=0.026006357744336128\n", - "Surface training t=32362, loss=0.02250723261386156\n", - "Surface training t=32363, loss=0.01888480968773365\n", - "Surface training t=32364, loss=0.01863790675997734\n", - "Surface training t=32365, loss=0.025178035721182823\n", - "Surface training t=32366, loss=0.02408025972545147\n", - "Surface training t=32367, loss=0.017880600411444902\n", - "Surface training t=32368, loss=0.028011941350996494\n", - "Surface training t=32369, loss=0.021148445084691048\n", - "Surface training t=32370, loss=0.01800904981791973\n", - "Surface training t=32371, loss=0.023865479044616222\n", - "Surface training t=32372, loss=0.01904117316007614\n", - "Surface training t=32373, loss=0.01960413856431842\n", - "Surface training t=32374, loss=0.014329458121210337\n", - "Surface training t=32375, loss=0.013261392246931791\n", - "Surface training t=32376, loss=0.021596801467239857\n", - "Surface 
training t=32377, loss=0.015971310436725616\n", - "Surface training t=32378, loss=0.014950146898627281\n", - "Surface training t=32379, loss=0.012352143879979849\n", - "Surface training t=32380, loss=0.017749857623130083\n", - "Surface training t=32381, loss=0.01458056177943945\n", - "Surface training t=32382, loss=0.015909319277852774\n", - "Surface training t=32383, loss=0.024109622463583946\n", - "Surface training t=32384, loss=0.021809258498251438\n", - "Surface training t=32385, loss=0.01767433062195778\n", - "Surface training t=32386, loss=0.015309962909668684\n", - "Surface training t=32387, loss=0.012857989873737097\n", - "Surface training t=32388, loss=0.015208656899631023\n", - "Surface training t=32389, loss=0.014734196476638317\n", - "Surface training t=32390, loss=0.012303427327424288\n", - "Surface training t=32391, loss=0.017034471035003662\n", - "Surface training t=32392, loss=0.022791617549955845\n", - "Surface training t=32393, loss=0.02465160470455885\n", - "Surface training t=32394, loss=0.019359944388270378\n", - "Surface training t=32395, loss=0.02329425700008869\n", - "Surface training t=32396, loss=0.020351099781692028\n", - "Surface training t=32397, loss=0.01772024855017662\n", - "Surface training t=32398, loss=0.018278279341757298\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=32399, loss=0.016927525866776705\n", - "Surface training t=32400, loss=0.018549637869000435\n", - "Surface training t=32401, loss=0.02128921914845705\n", - "Surface training t=32402, loss=0.016687991097569466\n", - "Surface training t=32403, loss=0.02089668530970812\n", - "Surface training t=32404, loss=0.024669061414897442\n", - "Surface training t=32405, loss=0.021606692112982273\n", - "Surface training t=32406, loss=0.0191554743796587\n", - "Surface training t=32407, loss=0.0224901270121336\n", - "Surface training t=32408, loss=0.017045811749994755\n", - "Surface training t=32409, loss=0.017641705460846424\n", - "Surface training t=32410, loss=0.01534306164830923\n", - "Surface training t=32411, loss=0.015731381718069315\n", - "Surface training t=32412, loss=0.01589849079027772\n", - "Surface training t=32413, loss=0.013870427384972572\n", - "Surface training t=32414, loss=0.016091421712189913\n", - "Surface training t=32415, loss=0.014291329775005579\n", - "Surface training t=32416, loss=0.012233120389282703\n", - "Surface training t=32417, loss=0.017009103205055\n", - "Surface training t=32418, loss=0.015313081443309784\n", - "Surface training t=32419, loss=0.014235257171094418\n", - "Surface training t=32420, loss=0.015807490330189466\n", - "Surface training t=32421, loss=0.01372389355674386\n", - "Surface training t=32422, loss=0.01481198612600565\n", - "Surface training t=32423, loss=0.02118590474128723\n", - "Surface training t=32424, loss=0.019233491737395525\n", - "Surface training t=32425, loss=0.015770782716572285\n", - "Surface training t=32426, loss=0.022444169968366623\n", - "Surface training t=32427, loss=0.02274128422141075\n", - "Surface training t=32428, loss=0.02059973683208227\n", - "Surface training t=32429, loss=0.02615782804787159\n", - "Surface training t=32430, loss=0.018012825399637222\n", - "Surface training t=32431, loss=0.016777626238763332\n", - "Surface training t=32432, loss=0.02222632337361574\n", - "Surface training t=32433, loss=0.01785496436059475\n", - "Surface training t=32434, loss=0.023212797939777374\n", - "Surface training t=32435, loss=0.02195593435317278\n", - "Surface training t=32436, 
loss=0.0200482327491045\n", - "Surface training t=32437, loss=0.018484235275536776\n", - "Surface training t=32438, loss=0.011770732700824738\n", - "Surface training t=32439, loss=0.010587343480437994\n", - "Surface training t=32440, loss=0.01002914970740676\n", - "Surface training t=32441, loss=0.014191137626767159\n", - "Surface training t=32442, loss=0.011050566099584103\n", - "Surface training t=32443, loss=0.020018531009554863\n", - "Surface training t=32444, loss=0.016868348699063063\n", - "Surface training t=32445, loss=0.017297659069299698\n", - "Surface training t=32446, loss=0.022061574272811413\n", - "Surface training t=32447, loss=0.01930201333016157\n", - "Surface training t=32448, loss=0.01839373353868723\n", - "Surface training t=32449, loss=0.022880693897604942\n", - "Surface training t=32450, loss=0.019022916443645954\n", - "Surface training t=32451, loss=0.020660043694078922\n", - "Surface training t=32452, loss=0.019267968833446503\n", - "Surface training t=32453, loss=0.02286667888984084\n", - "Surface training t=32454, loss=0.02041078731417656\n", - "Surface training t=32455, loss=0.021022322587668896\n", - "Surface training t=32456, loss=0.021068288013339043\n", - "Surface training t=32457, loss=0.01965176872909069\n", - "Surface training t=32458, loss=0.019443174824118614\n", - "Surface training t=32459, loss=0.019541322253644466\n", - "Surface training t=32460, loss=0.018131832592189312\n", - "Surface training t=32461, loss=0.017748495563864708\n", - "Surface training t=32462, loss=0.021522280760109425\n", - "Surface training t=32463, loss=0.023361527360975742\n", - "Surface training t=32464, loss=0.023907732218503952\n", - "Surface training t=32465, loss=0.020039409399032593\n", - "Surface training t=32466, loss=0.022333348635584116\n", - "Surface training t=32467, loss=0.02935570292174816\n", - "Surface training t=32468, loss=0.019302881322801113\n", - "Surface training t=32469, loss=0.02087399736046791\n", - "Surface training t=32470, loss=0.021685561165213585\n", - "Surface training t=32471, loss=0.02121166419237852\n", - "Surface training t=32472, loss=0.02298156078904867\n", - "Surface training t=32473, loss=0.020377865992486477\n", - "Surface training t=32474, loss=0.01826311368495226\n", - "Surface training t=32475, loss=0.013084628153592348\n", - "Surface training t=32476, loss=0.016806181985884905\n", - "Surface training t=32477, loss=0.01466682180762291\n", - "Surface training t=32478, loss=0.01622240897268057\n", - "Surface training t=32479, loss=0.012688161339610815\n", - "Surface training t=32480, loss=0.016585294622927904\n", - "Surface training t=32481, loss=0.020596562884747982\n", - "Surface training t=32482, loss=0.01754970569163561\n", - "Surface training t=32483, loss=0.018712827004492283\n", - "Surface training t=32484, loss=0.020004747435450554\n", - "Surface training t=32485, loss=0.024308142252266407\n", - "Surface training t=32486, loss=0.024808382615447044\n", - "Surface training t=32487, loss=0.026829006150364876\n", - "Surface training t=32488, loss=0.024053587578237057\n", - "Surface training t=32489, loss=0.036717044189572334\n", - "Surface training t=32490, loss=0.02531797345727682\n", - "Surface training t=32491, loss=0.026620078831911087\n", - "Surface training t=32492, loss=0.018756825476884842\n", - "Surface training t=32493, loss=0.014651639387011528\n", - "Surface training t=32494, loss=0.01731119677424431\n", - "Surface training t=32495, loss=0.018162349238991737\n", - "Surface training t=32496, loss=0.019515677355229855\n", - 
"Surface training t=32497, loss=0.016344073228538036\n", - "Surface training t=32498, loss=0.018446842674165964\n", - "Surface training t=32499, loss=0.018719611689448357\n", - "Surface training t=32500, loss=0.02789641823619604\n", - "Surface training t=32501, loss=0.02295362576842308\n", - "Surface training t=32502, loss=0.0251583531498909\n", - "Surface training t=32503, loss=0.02143536787480116\n", - "Surface training t=32504, loss=0.019528338685631752\n", - "Surface training t=32505, loss=0.021965700201690197\n", - "Surface training t=32506, loss=0.019817430526018143\n", - "Surface training t=32507, loss=0.021760561503469944\n", - "Surface training t=32508, loss=0.02372610569000244\n", - "Surface training t=32509, loss=0.021467302925884724\n", - "Surface training t=32510, loss=0.019677472300827503\n", - "Surface training t=32511, loss=0.017521914560347795\n", - "Surface training t=32512, loss=0.020115556195378304\n", - "Surface training t=32513, loss=0.019797297194600105\n", - "Surface training t=32514, loss=0.02403914090245962\n", - "Surface training t=32515, loss=0.01871771551668644\n", - "Surface training t=32516, loss=0.022632683627307415\n", - "Surface training t=32517, loss=0.02018290013074875\n", - "Surface training t=32518, loss=0.018852626904845238\n", - "Surface training t=32519, loss=0.016482902225106955\n", - "Surface training t=32520, loss=0.01758509036153555\n", - "Surface training t=32521, loss=0.019264549016952515\n", - "Surface training t=32522, loss=0.012966119218617678\n", - "Surface training t=32523, loss=0.01905785035341978\n", - "Surface training t=32524, loss=0.015727849677205086\n", - "Surface training t=32525, loss=0.021537354215979576\n", - "Surface training t=32526, loss=0.021801834926009178\n", - "Surface training t=32527, loss=0.01971443183720112\n", - "Surface training t=32528, loss=0.031364116817712784\n", - "Surface training t=32529, loss=0.02784135192632675\n", - "Surface training t=32530, loss=0.024825449101626873\n", - "Surface training t=32531, loss=0.022380194626748562\n", - "Surface training t=32532, loss=0.022151030600070953\n", - "Surface training t=32533, loss=0.024462045170366764\n", - "Surface training t=32534, loss=0.020454798825085163\n", - "Surface training t=32535, loss=0.01574198715388775\n", - "Surface training t=32536, loss=0.01931222714483738\n", - "Surface training t=32537, loss=0.01835074182599783\n", - "Surface training t=32538, loss=0.022116427309811115\n", - "Surface training t=32539, loss=0.016732505057007074\n", - "Surface training t=32540, loss=0.01882775966078043\n", - "Surface training t=32541, loss=0.020902770571410656\n", - "Surface training t=32542, loss=0.02173755969852209\n", - "Surface training t=32543, loss=0.017372731119394302\n", - "Surface training t=32544, loss=0.01624395838007331\n", - "Surface training t=32545, loss=0.02121574431657791\n", - "Surface training t=32546, loss=0.03687787801027298\n", - "Surface training t=32547, loss=0.028347380459308624\n", - "Surface training t=32548, loss=0.019764390774071217\n", - "Surface training t=32549, loss=0.02287125028669834\n", - "Surface training t=32550, loss=0.026084898971021175\n", - "Surface training t=32551, loss=0.01998309977352619\n", - "Surface training t=32552, loss=0.021974786184728146\n", - "Surface training t=32553, loss=0.03353959135711193\n", - "Surface training t=32554, loss=0.02088384795933962\n", - "Surface training t=32555, loss=0.02183604333549738\n", - "Surface training t=32556, loss=0.02504565566778183\n", - "Surface training t=32557, 
loss=0.025274711661040783\n", - "Surface training t=32558, loss=0.019565725699067116\n", - "Surface training t=32559, loss=0.021364539861679077\n", - "Surface training t=32560, loss=0.028858748264610767\n", - "Surface training t=32561, loss=0.025892846286296844\n", - "Surface training t=32562, loss=0.02083089016377926\n", - "Surface training t=32563, loss=0.022447523660957813\n", - "Surface training t=32564, loss=0.02266183029860258\n", - "Surface training t=32565, loss=0.01627439446747303\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=32566, loss=0.01926009263843298\n", - "Surface training t=32567, loss=0.01686867792159319\n", - "Surface training t=32568, loss=0.01636617397889495\n", - "Surface training t=32569, loss=0.019487449899315834\n", - "Surface training t=32570, loss=0.01609215885400772\n", - "Surface training t=32571, loss=0.01699543697759509\n", - "Surface training t=32572, loss=0.015024973079562187\n", - "Surface training t=32573, loss=0.02022834960371256\n", - "Surface training t=32574, loss=0.027171400375664234\n", - "Surface training t=32575, loss=0.029363976791501045\n", - "Surface training t=32576, loss=0.02125030942261219\n", - "Surface training t=32577, loss=0.021673440001904964\n", - "Surface training t=32578, loss=0.017912287265062332\n", - "Surface training t=32579, loss=0.03204888477921486\n", - "Surface training t=32580, loss=0.021646550856530666\n", - "Surface training t=32581, loss=0.02517481241375208\n", - "Surface training t=32582, loss=0.0311855711042881\n", - "Surface training t=32583, loss=0.020427404902875423\n", - "Surface training t=32584, loss=0.02650531381368637\n", - "Surface training t=32585, loss=0.022065259516239166\n", - "Surface training t=32586, loss=0.02198673039674759\n", - "Surface training t=32587, loss=0.0221760431304574\n", - "Surface training t=32588, loss=0.014655082020908594\n", - "Surface training t=32589, loss=0.01404052134603262\n", - "Surface training t=32590, loss=0.01727381069213152\n", - "Surface training t=32591, loss=0.01521500525996089\n", - "Surface training t=32592, loss=0.01700657233595848\n", - "Surface training t=32593, loss=0.01727332640439272\n", - "Surface training t=32594, loss=0.017828427255153656\n", - "Surface training t=32595, loss=0.019830435514450073\n", - "Surface training t=32596, loss=0.0200692443177104\n", - "Surface training t=32597, loss=0.02218629326671362\n", - "Surface training t=32598, loss=0.018924042582511902\n", - "Surface training t=32599, loss=0.017359976656734943\n", - "Surface training t=32600, loss=0.01687179133296013\n", - "Surface training t=32601, loss=0.016765445936471224\n", - "Surface training t=32602, loss=0.017197466921061277\n", - "Surface training t=32603, loss=0.01744602434337139\n", - "Surface training t=32604, loss=0.021449058316648006\n", - "Surface training t=32605, loss=0.01789945736527443\n", - "Surface training t=32606, loss=0.015066618099808693\n", - "Surface training t=32607, loss=0.015357046388089657\n", - "Surface training t=32608, loss=0.011975346598774195\n", - "Surface training t=32609, loss=0.019062149338424206\n", - "Surface training t=32610, loss=0.020428535528481007\n", - "Surface training t=32611, loss=0.016259903088212013\n", - "Surface training t=32612, loss=0.01606424432247877\n", - "Surface training t=32613, loss=0.019834790378808975\n", - "Surface training t=32614, loss=0.01585903251543641\n", - "Surface training t=32615, loss=0.019678454846143723\n", - "Surface training t=32616, loss=0.01785015780478716\n", - 
"Surface training t=32617, loss=0.0194559246301651\n", - "Surface training t=32618, loss=0.02114919200539589\n", - "Surface training t=32619, loss=0.01991963293403387\n", - "Surface training t=32620, loss=0.01939601730555296\n", - "Surface training t=32621, loss=0.017994089052081108\n", - "Surface training t=32622, loss=0.018253911286592484\n", - "Surface training t=32623, loss=0.015386282466351986\n", - "Surface training t=32624, loss=0.01895052846521139\n", - "Surface training t=32625, loss=0.014783751219511032\n", - "Surface training t=32626, loss=0.016116127837449312\n", - "Surface training t=32627, loss=0.018690672237426043\n", - "Surface training t=32628, loss=0.02176580624654889\n", - "Surface training t=32629, loss=0.018336630892008543\n", - "Surface training t=32630, loss=0.024292555637657642\n", - "Surface training t=32631, loss=0.03526755329221487\n", - "Surface training t=32632, loss=0.02493102476000786\n", - "Surface training t=32633, loss=0.02394018042832613\n", - "Surface training t=32634, loss=0.02253019716590643\n", - "Surface training t=32635, loss=0.01981774065643549\n", - "Surface training t=32636, loss=0.02422290202230215\n", - "Surface training t=32637, loss=0.02620516438037157\n", - "Surface training t=32638, loss=0.020991861820220947\n", - "Surface training t=32639, loss=0.022247308399528265\n", - "Surface training t=32640, loss=0.027272955514490604\n", - "Surface training t=32641, loss=0.017123814672231674\n", - "Surface training t=32642, loss=0.029417306184768677\n", - "Surface training t=32643, loss=0.02740601822733879\n", - "Surface training t=32644, loss=0.018497186712920666\n", - "Surface training t=32645, loss=0.021284975111484528\n", - "Surface training t=32646, loss=0.020198681391775608\n", - "Surface training t=32647, loss=0.021454254165291786\n", - "Surface training t=32648, loss=0.02411322435364127\n", - "Surface training t=32649, loss=0.02415954601019621\n", - "Surface training t=32650, loss=0.031995171681046486\n", - "Surface training t=32651, loss=0.037650651298463345\n", - "Surface training t=32652, loss=0.0235591484233737\n", - "Surface training t=32653, loss=0.02731897309422493\n", - "Surface training t=32654, loss=0.033199492841959\n", - "Surface training t=32655, loss=0.0306604141369462\n", - "Surface training t=32656, loss=0.027220268733799458\n", - "Surface training t=32657, loss=0.0209308797493577\n", - "Surface training t=32658, loss=0.02002089051529765\n", - "Surface training t=32659, loss=0.0220239395275712\n", - "Surface training t=32660, loss=0.022879095748066902\n", - "Surface training t=32661, loss=0.022127380594611168\n", - "Surface training t=32662, loss=0.022633563727140427\n", - "Surface training t=32663, loss=0.019857344217598438\n", - "Surface training t=32664, loss=0.02063494175672531\n", - "Surface training t=32665, loss=0.018166874535381794\n", - "Surface training t=32666, loss=0.020296738483011723\n", - "Surface training t=32667, loss=0.01927434653043747\n", - "Surface training t=32668, loss=0.014870548620820045\n", - "Surface training t=32669, loss=0.01598910754546523\n", - "Surface training t=32670, loss=0.020434238016605377\n", - "Surface training t=32671, loss=0.01527920039370656\n", - "Surface training t=32672, loss=0.014786939602345228\n", - "Surface training t=32673, loss=0.018596449866890907\n", - "Surface training t=32674, loss=0.03269138466566801\n", - "Surface training t=32675, loss=0.025819960050284863\n", - "Surface training t=32676, loss=0.02279931865632534\n", - "Surface training t=32677, 
loss=0.02497909776866436\n", - "Surface training t=32678, loss=0.02094187680631876\n", - "Surface training t=32679, loss=0.0245633190497756\n", - "Surface training t=32680, loss=0.019916346296668053\n", - "Surface training t=32681, loss=0.019766093231737614\n", - "Surface training t=32682, loss=0.015439332462847233\n", - "Surface training t=32683, loss=0.015800952911376953\n", - "Surface training t=32684, loss=0.01854559686034918\n", - "Surface training t=32685, loss=0.01997549459338188\n", - "Surface training t=32686, loss=0.019625038839876652\n", - "Surface training t=32687, loss=0.016680866479873657\n", - "Surface training t=32688, loss=0.018699503503739834\n", - "Surface training t=32689, loss=0.015484289731830359\n", - "Surface training t=32690, loss=0.018360912334173918\n", - "Surface training t=32691, loss=0.011837402824312449\n", - "Surface training t=32692, loss=0.013931077439337969\n", - "Surface training t=32693, loss=0.015305935870856047\n", - "Surface training t=32694, loss=0.014916946645826101\n", - "Surface training t=32695, loss=0.020204706117510796\n", - "Surface training t=32696, loss=0.019438154064118862\n", - "Surface training t=32697, loss=0.021204554475843906\n", - "Surface training t=32698, loss=0.0204209852963686\n", - "Surface training t=32699, loss=0.016635197214782238\n", - "Surface training t=32700, loss=0.02335718646645546\n", - "Surface training t=32701, loss=0.02094079554080963\n", - "Surface training t=32702, loss=0.019242013804614544\n", - "Surface training t=32703, loss=0.013697800226509571\n", - "Surface training t=32704, loss=0.01491232356056571\n", - "Surface training t=32705, loss=0.01889181323349476\n", - "Surface training t=32706, loss=0.01590202283114195\n", - "Surface training t=32707, loss=0.016027656383812428\n", - "Surface training t=32708, loss=0.016828753519803286\n", - "Surface training t=32709, loss=0.016236462630331516\n", - "Surface training t=32710, loss=0.013881601858884096\n", - "Surface training t=32711, loss=0.01937265694141388\n", - "Surface training t=32712, loss=0.016935831867158413\n", - "Surface training t=32713, loss=0.013632831163704395\n", - "Surface training t=32714, loss=0.016048388555645943\n", - "Surface training t=32715, loss=0.013207674957811832\n", - "Surface training t=32716, loss=0.015436096582561731\n", - "Surface training t=32717, loss=0.014598243404179811\n", - "Surface training t=32718, loss=0.014892938546836376\n", - "Surface training t=32719, loss=0.014970772434026003\n", - "Surface training t=32720, loss=0.017658750526607037\n", - "Surface training t=32721, loss=0.014325880911201239\n", - "Surface training t=32722, loss=0.013190797064453363\n", - "Surface training t=32723, loss=0.0158822201192379\n", - "Surface training t=32724, loss=0.013690924271941185\n", - "Surface training t=32725, loss=0.013233205769211054\n", - "Surface training t=32726, loss=0.01294622989371419\n", - "Surface training t=32727, loss=0.015149144921451807\n", - "Surface training t=32728, loss=0.013040728867053986\n", - "Surface training t=32729, loss=0.021438656374812126\n", - "Surface training t=32730, loss=0.016228361055254936\n", - "Surface training t=32731, loss=0.020711434073746204\n", - "Surface training t=32732, loss=0.020884661935269833\n", - "Surface training t=32733, loss=0.020957290194928646\n", - "Surface training t=32734, loss=0.02266003657132387\n", - "Surface training t=32735, loss=0.03355822339653969\n", - "Surface training t=32736, loss=0.0288256723433733\n", - "Surface training t=32737, loss=0.030607868917286396\n", - 
"Surface training t=32738, loss=0.029514603316783905\n", - "Surface training t=32739, loss=0.028701824136078358\n", - "Surface training t=32740, loss=0.03006286919116974\n", - "Surface training t=32741, loss=0.019201181828975677\n", - "Surface training t=32742, loss=0.018181881867349148\n", - "Surface training t=32743, loss=0.015134533867239952\n", - "Surface training t=32744, loss=0.022618823684751987\n", - "Surface training t=32745, loss=0.017587565816938877\n", - "Surface training t=32746, loss=0.021803184412419796\n", - "Surface training t=32747, loss=0.026978074572980404\n", - "Surface training t=32748, loss=0.018501194193959236\n", - "Surface training t=32749, loss=0.02288957592099905\n", - "Surface training t=32750, loss=0.03599017299711704\n", - "Surface training t=32751, loss=0.01805374026298523\n", - "Surface training t=32752, loss=0.020510174334049225\n", - "Surface training t=32753, loss=0.0215360214933753\n", - "Surface training t=32754, loss=0.020376057364046574\n", - "Surface training t=32755, loss=0.022314843721687794\n", - "Surface training t=32756, loss=0.02493907045572996\n", - "Surface training t=32757, loss=0.02477350737899542\n", - "Surface training t=32758, loss=0.037188272923231125\n", - "Surface training t=32759, loss=0.029015754349529743\n", - "Surface training t=32760, loss=0.023772825486958027\n", - "Surface training t=32761, loss=0.02271357085555792\n", - "Surface training t=32762, loss=0.023030459880828857\n", - "Surface training t=32763, loss=0.020164051093161106\n", - "Surface training t=32764, loss=0.018425357528030872\n", - "Surface training t=32765, loss=0.018893171101808548\n", - "Surface training t=32766, loss=0.020345399156212807\n", - "Surface training t=32767, loss=0.0191332520917058\n", - "Surface training t=32768, loss=0.021736920811235905\n", - "Surface training t=32769, loss=0.02219028677791357\n", - "Surface training t=32770, loss=0.02006977330893278\n", - "Surface training t=32771, loss=0.02403072128072381\n", - "Surface training t=32772, loss=0.022674499079585075\n", - "Surface training t=32773, loss=0.02023510728031397\n", - "Surface training t=32774, loss=0.021424136124551296\n", - "Surface training t=32775, loss=0.022488804534077644\n", - "Surface training t=32776, loss=0.020967703312635422\n", - "Surface training t=32777, loss=0.025042440742254257\n", - "Surface training t=32778, loss=0.020008156076073647\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=32779, loss=0.02005286980420351\n", - "Surface training t=32780, loss=0.018515589646995068\n", - "Surface training t=32781, loss=0.023312545381486416\n", - "Surface training t=32782, loss=0.022790311835706234\n", - "Surface training t=32783, loss=0.016246437095105648\n", - "Surface training t=32784, loss=0.021102366037666798\n", - "Surface training t=32785, loss=0.013141804374754429\n", - "Surface training t=32786, loss=0.015084845013916492\n", - "Surface training t=32787, loss=0.01090680854395032\n", - "Surface training t=32788, loss=0.015364705584943295\n", - "Surface training t=32789, loss=0.012559607159346342\n", - "Surface training t=32790, loss=0.01859392784535885\n", - "Surface training t=32791, loss=0.01886165887117386\n", - "Surface training t=32792, loss=0.017551546916365623\n", - "Surface training t=32793, loss=0.012795133516192436\n", - "Surface training t=32794, loss=0.020019925199449062\n", - "Surface training t=32795, loss=0.01684232661500573\n", - "Surface training t=32796, loss=0.01623823307454586\n", - "Surface training 
t=32797, loss=0.015218676999211311\n", - "Surface training t=32798, loss=0.015474861953407526\n", - "Surface training t=32799, loss=0.013319446239620447\n", - "Surface training t=32800, loss=0.013701543677598238\n", - "Surface training t=32801, loss=0.014031900558620691\n", - "Surface training t=32802, loss=0.013777378480881453\n", - "Surface training t=32803, loss=0.01368878548964858\n", - "Surface training t=32804, loss=0.014153629541397095\n", - "Surface training t=32805, loss=0.014531305059790611\n", - "Surface training t=32806, loss=0.01682233391329646\n", - "Surface training t=32807, loss=0.014408629853278399\n", - "Surface training t=32808, loss=0.013259312603622675\n", - "Surface training t=32809, loss=0.016426309943199158\n", - "Surface training t=32810, loss=0.019596122205257416\n", - "Surface training t=32811, loss=0.01643369160592556\n", - "Surface training t=32812, loss=0.016147498972713947\n", - "Surface training t=32813, loss=0.019840294495224953\n", - "Surface training t=32814, loss=0.01544644357636571\n", - "Surface training t=32815, loss=0.015158360823988914\n", - "Surface training t=32816, loss=0.015139584429562092\n", - "Surface training t=32817, loss=0.022032818756997585\n", - "Surface training t=32818, loss=0.015603568870574236\n", - "Surface training t=32819, loss=0.01846904680132866\n", - "Surface training t=32820, loss=0.017433973029255867\n", - "Surface training t=32821, loss=0.018183155916631222\n", - "Surface training t=32822, loss=0.015708522871136665\n", - "Surface training t=32823, loss=0.017974390648305416\n", - "Surface training t=32824, loss=0.019150770269334316\n", - "Surface training t=32825, loss=0.015289624221622944\n", - "Surface training t=32826, loss=0.017968108877539635\n", - "Surface training t=32827, loss=0.011790810152888298\n", - "Surface training t=32828, loss=0.018345551565289497\n", - "Surface training t=32829, loss=0.018524502404034138\n", - "Surface training t=32830, loss=0.017582839354872704\n", - "Surface training t=32831, loss=0.017924832180142403\n", - "Surface training t=32832, loss=0.013541263993829489\n", - "Surface training t=32833, loss=0.017309994902461767\n", - "Surface training t=32834, loss=0.017292417585849762\n", - "Surface training t=32835, loss=0.024556712247431278\n", - "Surface training t=32836, loss=0.01653006626293063\n", - "Surface training t=32837, loss=0.015347465872764587\n", - "Surface training t=32838, loss=0.016829223837703466\n", - "Surface training t=32839, loss=0.018356594257056713\n", - "Surface training t=32840, loss=0.021718810312449932\n", - "Surface training t=32841, loss=0.023009326308965683\n", - "Surface training t=32842, loss=0.02157040499150753\n", - "Surface training t=32843, loss=0.02014291286468506\n", - "Surface training t=32844, loss=0.01776363980025053\n", - "Surface training t=32845, loss=0.018011704087257385\n", - "Surface training t=32846, loss=0.0196042126044631\n", - "Surface training t=32847, loss=0.01900898665189743\n", - "Surface training t=32848, loss=0.0204067500308156\n", - "Surface training t=32849, loss=0.021338234655559063\n", - "Surface training t=32850, loss=0.027445195242762566\n", - "Surface training t=32851, loss=0.022701513022184372\n", - "Surface training t=32852, loss=0.021541522815823555\n", - "Surface training t=32853, loss=0.02870226465165615\n", - "Surface training t=32854, loss=0.025677974335849285\n", - "Surface training t=32855, loss=0.026094917207956314\n", - "Surface training t=32856, loss=0.025728324428200722\n", - "Surface training t=32857, 
loss=0.018488485366106033\n", - "Surface training t=32858, loss=0.01704376842826605\n", - "Surface training t=32859, loss=0.028300335630774498\n", - "Surface training t=32860, loss=0.014180027414113283\n", - "Surface training t=32861, loss=0.020072096958756447\n", - "Surface training t=32862, loss=0.017977941781282425\n", - "Surface training t=32863, loss=0.020218861289322376\n", - "Surface training t=32864, loss=0.018255590461194515\n", - "Surface training t=32865, loss=0.01843391451984644\n", - "Surface training t=32866, loss=0.015798773616552353\n", - "Surface training t=32867, loss=0.023387434892356396\n", - "Surface training t=32868, loss=0.026469601318240166\n", - "Surface training t=32869, loss=0.030205927789211273\n", - "Surface training t=32870, loss=0.02354816161096096\n", - "Surface training t=32871, loss=0.02529712114483118\n", - "Surface training t=32872, loss=0.027432389557361603\n", - "Surface training t=32873, loss=0.02080217469483614\n", - "Surface training t=32874, loss=0.018650400452315807\n", - "Surface training t=32875, loss=0.020180012099444866\n", - "Surface training t=32876, loss=0.015751118771731853\n", - "Surface training t=32877, loss=0.021486626006662846\n", - "Surface training t=32878, loss=0.01761234737932682\n", - "Surface training t=32879, loss=0.015125802718102932\n", - "Surface training t=32880, loss=0.02355827484279871\n", - "Surface training t=32881, loss=0.02078984212130308\n", - "Surface training t=32882, loss=0.021720866672694683\n", - "Surface training t=32883, loss=0.021220573224127293\n", - "Surface training t=32884, loss=0.01953467819839716\n", - "Surface training t=32885, loss=0.019105969928205013\n", - "Surface training t=32886, loss=0.01920204423367977\n", - "Surface training t=32887, loss=0.02104510646313429\n", - "Surface training t=32888, loss=0.022220169194042683\n", - "Surface training t=32889, loss=0.02069553779438138\n", - "Surface training t=32890, loss=0.02374010719358921\n", - "Surface training t=32891, loss=0.025652894750237465\n", - "Surface training t=32892, loss=0.020057372748851776\n", - "Surface training t=32893, loss=0.01703010220080614\n", - "Surface training t=32894, loss=0.022467021830379963\n", - "Surface training t=32895, loss=0.017412032932043076\n", - "Surface training t=32896, loss=0.022755936719477177\n", - "Surface training t=32897, loss=0.03010251186788082\n", - "Surface training t=32898, loss=0.027951004914939404\n", - "Surface training t=32899, loss=0.03423931077122688\n", - "Surface training t=32900, loss=0.02282391581684351\n", - "Surface training t=32901, loss=0.024971485137939453\n", - "Surface training t=32902, loss=0.023308920674026012\n", - "Surface training t=32903, loss=0.01950515154749155\n", - "Surface training t=32904, loss=0.02118520252406597\n", - "Surface training t=32905, loss=0.018275336362421513\n", - "Surface training t=32906, loss=0.02332463301718235\n", - "Surface training t=32907, loss=0.016873056534677744\n", - "Surface training t=32908, loss=0.01450301380828023\n", - "Surface training t=32909, loss=0.01612609252333641\n", - "Surface training t=32910, loss=0.015989332459867\n", - "Surface training t=32911, loss=0.016577578149735928\n", - "Surface training t=32912, loss=0.020195172168314457\n", - "Surface training t=32913, loss=0.020238989032804966\n", - "Surface training t=32914, loss=0.01943869423121214\n", - "Surface training t=32915, loss=0.01878211833536625\n", - "Surface training t=32916, loss=0.02318960428237915\n", - "Surface training t=32917, loss=0.020635255612432957\n", - "Surface 
training t=32918, loss=0.013559951446950436\n", - "Surface training t=32919, loss=0.019614944234490395\n", - "Surface training t=32920, loss=0.01588681060820818\n", - "Surface training t=32921, loss=0.014990294352173805\n", - "Surface training t=32922, loss=0.0129085429944098\n", - "Surface training t=32923, loss=0.015298128128051758\n", - "Surface training t=32924, loss=0.016173304058611393\n", - "Surface training t=32925, loss=0.02039544377475977\n", - "Surface training t=32926, loss=0.024211709387600422\n", - "Surface training t=32927, loss=0.035119129344820976\n", - "Surface training t=32928, loss=0.021689416840672493\n", - "Surface training t=32929, loss=0.024159131571650505\n", - "Surface training t=32930, loss=0.01947061438113451\n", - "Surface training t=32931, loss=0.020339482463896275\n", - "Surface training t=32932, loss=0.01804258394986391\n", - "Surface training t=32933, loss=0.023926270194351673\n", - "Surface training t=32934, loss=0.022425814531743526\n", - "Surface training t=32935, loss=0.014381480403244495\n", - "Surface training t=32936, loss=0.017752004321664572\n", - "Surface training t=32937, loss=0.014646390918642282\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=32938, loss=0.01290197717025876\n", - "Surface training t=32939, loss=0.016126959584653378\n", - "Surface training t=32940, loss=0.01777850277721882\n", - "Surface training t=32941, loss=0.019049442373216152\n", - "Surface training t=32942, loss=0.01705993991345167\n", - "Surface training t=32943, loss=0.018974103964865208\n", - "Surface training t=32944, loss=0.02432376053184271\n", - "Surface training t=32945, loss=0.020029413513839245\n", - "Surface training t=32946, loss=0.01705049816519022\n", - "Surface training t=32947, loss=0.015928922221064568\n", - "Surface training t=32948, loss=0.013730295933783054\n", - "Surface training t=32949, loss=0.016438521444797516\n", - "Surface training t=32950, loss=0.014782984275370836\n", - "Surface training t=32951, loss=0.013299878686666489\n", - "Surface training t=32952, loss=0.014745957218110561\n", - "Surface training t=32953, loss=0.012403835542500019\n", - "Surface training t=32954, loss=0.024904842488467693\n", - "Surface training t=32955, loss=0.026816113851964474\n", - "Surface training t=32956, loss=0.02199309691786766\n", - "Surface training t=32957, loss=0.025862730108201504\n", - "Surface training t=32958, loss=0.02103637531399727\n", - "Surface training t=32959, loss=0.025438674725592136\n", - "Surface training t=32960, loss=0.025233393535017967\n", - "Surface training t=32961, loss=0.02301181945949793\n", - "Surface training t=32962, loss=0.023612212389707565\n", - "Surface training t=32963, loss=0.03599230386316776\n", - "Surface training t=32964, loss=0.0329066002741456\n", - "Surface training t=32965, loss=0.032930982299149036\n", - "Surface training t=32966, loss=0.04523215629160404\n", - "Surface training t=32967, loss=0.023759964853525162\n", - "Surface training t=32968, loss=0.027599677443504333\n", - "Surface training t=32969, loss=0.02663690410554409\n", - "Surface training t=32970, loss=0.02125880029052496\n", - "Surface training t=32971, loss=0.023247022181749344\n", - "Surface training t=32972, loss=0.019517093896865845\n", - "Surface training t=32973, loss=0.01910205651074648\n", - "Surface training t=32974, loss=0.023685581050813198\n", - "Surface training t=32975, loss=0.02473632525652647\n", - "Surface training t=32976, loss=0.023700273595750332\n", - "Surface training t=32977, 
loss=0.024257349781692028\n", - "Surface training t=32978, loss=0.026220455765724182\n", - "Surface training t=32979, loss=0.026979115791618824\n", - "Surface training t=32980, loss=0.031728798523545265\n", - "Surface training t=32981, loss=0.02603348158299923\n", - "Surface training t=32982, loss=0.02536874171346426\n", - "Surface training t=32983, loss=0.02404133975505829\n", - "Surface training t=32984, loss=0.020851424895226955\n", - "Surface training t=32985, loss=0.02575293555855751\n", - "Surface training t=32986, loss=0.03453131578862667\n", - "Surface training t=32987, loss=0.026349158957600594\n", - "Surface training t=32988, loss=0.02889242861419916\n", - "Surface training t=32989, loss=0.03730468824505806\n", - "Surface training t=32990, loss=0.03506948612630367\n", - "Surface training t=32991, loss=0.028432332910597324\n", - "Surface training t=32992, loss=0.03429390862584114\n", - "Surface training t=32993, loss=0.03827916458249092\n", - "Surface training t=32994, loss=0.027629923075437546\n", - "Surface training t=32995, loss=0.029177356511354446\n", - "Surface training t=32996, loss=0.02911987714469433\n", - "Surface training t=32997, loss=0.017361104488372803\n", - "Surface training t=32998, loss=0.018086583353579044\n", - "Surface training t=32999, loss=0.02371976338326931\n", - "Surface training t=33000, loss=0.026080715470016003\n", - "Surface training t=33001, loss=0.033968398347496986\n", - "Surface training t=33002, loss=0.029693779535591602\n", - "Surface training t=33003, loss=0.02940808515995741\n", - "Surface training t=33004, loss=0.030790437012910843\n", - "Surface training t=33005, loss=0.033974066376686096\n", - "Surface training t=33006, loss=0.028589047491550446\n", - "Surface training t=33007, loss=0.028650577180087566\n", - "Surface training t=33008, loss=0.04172770492732525\n", - "Surface training t=33009, loss=0.027946150861680508\n", - "Surface training t=33010, loss=0.027464217506349087\n", - "Surface training t=33011, loss=0.01980271190404892\n", - "Surface training t=33012, loss=0.018982402980327606\n", - "Surface training t=33013, loss=0.02041514217853546\n", - "Surface training t=33014, loss=0.022964179515838623\n", - "Surface training t=33015, loss=0.021853657439351082\n", - "Surface training t=33016, loss=0.020445192698389292\n", - "Surface training t=33017, loss=0.021136442199349403\n", - "Surface training t=33018, loss=0.019356686621904373\n", - "Surface training t=33019, loss=0.018734985031187534\n", - "Surface training t=33020, loss=0.0221097432076931\n", - "Surface training t=33021, loss=0.02008890174329281\n", - "Surface training t=33022, loss=0.018742565996944904\n", - "Surface training t=33023, loss=0.021508892066776752\n", - "Surface training t=33024, loss=0.017852271907031536\n", - "Surface training t=33025, loss=0.03104453720152378\n", - "Surface training t=33026, loss=0.031749785877764225\n", - "Surface training t=33027, loss=0.0214218869805336\n", - "Surface training t=33028, loss=0.021073507145047188\n", - "Surface training t=33029, loss=0.021495572291314602\n", - "Surface training t=33030, loss=0.018226711079478264\n", - "Surface training t=33031, loss=0.018273686058819294\n", - "Surface training t=33032, loss=0.019655068404972553\n", - "Surface training t=33033, loss=0.02119604405015707\n", - "Surface training t=33034, loss=0.01498638279736042\n", - "Surface training t=33035, loss=0.019310126081109047\n", - "Surface training t=33036, loss=0.016258603893220425\n", - "Surface training t=33037, loss=0.02131043467670679\n", - 
"Surface training t=33038, loss=0.014415573328733444\n", - "Surface training t=33039, loss=0.014799903146922588\n", - "Surface training t=33040, loss=0.01641182415187359\n", - "Surface training t=33041, loss=0.014238890260457993\n", - "Surface training t=33042, loss=0.0199557701125741\n", - "Surface training t=33043, loss=0.01873343251645565\n", - "Surface training t=33044, loss=0.01835598796606064\n", - "Surface training t=33045, loss=0.017007440328598022\n", - "Surface training t=33046, loss=0.022055609151721\n", - "Surface training t=33047, loss=0.017000076826661825\n", - "Surface training t=33048, loss=0.019357162062078714\n", - "Surface training t=33049, loss=0.018158715218305588\n", - "Surface training t=33050, loss=0.023886202834546566\n", - "Surface training t=33051, loss=0.020599269308149815\n", - "Surface training t=33052, loss=0.0170269962400198\n", - "Surface training t=33053, loss=0.018890121020376682\n", - "Surface training t=33054, loss=0.01859051827341318\n", - "Surface training t=33055, loss=0.021776563487946987\n", - "Surface training t=33056, loss=0.021018117666244507\n", - "Surface training t=33057, loss=0.027841471135616302\n", - "Surface training t=33058, loss=0.02819297183305025\n", - "Surface training t=33059, loss=0.021809490397572517\n", - "Surface training t=33060, loss=0.01666687522083521\n", - "Surface training t=33061, loss=0.024708562530577183\n", - "Surface training t=33062, loss=0.020737530197948217\n", - "Surface training t=33063, loss=0.017864480149000883\n", - "Surface training t=33064, loss=0.02658180333673954\n", - "Surface training t=33065, loss=0.021841609850525856\n", - "Surface training t=33066, loss=0.024736884981393814\n", - "Surface training t=33067, loss=0.040504876524209976\n", - "Surface training t=33068, loss=0.0249236561357975\n", - "Surface training t=33069, loss=0.03721056319773197\n", - "Surface training t=33070, loss=0.025929341092705727\n", - "Surface training t=33071, loss=0.037798844277858734\n", - "Surface training t=33072, loss=0.026538193225860596\n", - "Surface training t=33073, loss=0.04463760368525982\n", - "Surface training t=33074, loss=0.027262147516012192\n", - "Surface training t=33075, loss=0.02936583198606968\n", - "Surface training t=33076, loss=0.0397974643856287\n", - "Surface training t=33077, loss=0.027018863707780838\n", - "Surface training t=33078, loss=0.027037888765335083\n", - "Surface training t=33079, loss=0.027424180880188942\n", - "Surface training t=33080, loss=0.027665198780596256\n", - "Surface training t=33081, loss=0.02178390510380268\n", - "Surface training t=33082, loss=0.018715565092861652\n", - "Surface training t=33083, loss=0.026413097977638245\n", - "Surface training t=33084, loss=0.02202023286372423\n", - "Surface training t=33085, loss=0.023420636542141438\n", - "Surface training t=33086, loss=0.02431851252913475\n", - "Surface training t=33087, loss=0.018469913862645626\n", - "Surface training t=33088, loss=0.02068949956446886\n", - "Surface training t=33089, loss=0.018010065890848637\n", - "Surface training t=33090, loss=0.019086091313511133\n", - "Surface training t=33091, loss=0.013817619066685438\n", - "Surface training t=33092, loss=0.015195684507489204\n", - "Surface training t=33093, loss=0.01431306079030037\n", - "Surface training t=33094, loss=0.019635888747870922\n", - "Surface training t=33095, loss=0.02290683425962925\n", - "Surface training t=33096, loss=0.020887925289571285\n", - "Surface training t=33097, loss=0.018175081349909306\n", - "Surface training t=33098, 
[diff hunk condensed: ~1,260 removed notebook stdout lines elided. They are repeated "Surface training t=<step>, loss=<value>" log entries for steps t=33099 through t=34358, with losses fluctuating in the ≈0.011–0.056 range; the deleted output cells contain nothing else.]
"Surface training t=34359, loss=0.01754997903481126\n", - "Surface training t=34360, loss=0.020720165688544512\n", - "Surface training t=34361, loss=0.031915306113660336\n", - "Surface training t=34362, loss=0.02978681866079569\n", - "Surface training t=34363, loss=0.026450506411492825\n", - "Surface training t=34364, loss=0.022037643007934093\n", - "Surface training t=34365, loss=0.02091251965612173\n", - "Surface training t=34366, loss=0.021592730656266212\n", - "Surface training t=34367, loss=0.018785938620567322\n", - "Surface training t=34368, loss=0.022326448000967503\n", - "Surface training t=34369, loss=0.022186757996678352\n", - "Surface training t=34370, loss=0.02983028255403042\n", - "Surface training t=34371, loss=0.024103090167045593\n", - "Surface training t=34372, loss=0.026009399443864822\n", - "Surface training t=34373, loss=0.021230186335742474\n", - "Surface training t=34374, loss=0.02341720275580883\n", - "Surface training t=34375, loss=0.020288207568228245\n", - "Surface training t=34376, loss=0.016849172301590443\n", - "Surface training t=34377, loss=0.01860393863171339\n", - "Surface training t=34378, loss=0.01316682854667306\n", - "Surface training t=34379, loss=0.01778957899659872\n", - "Surface training t=34380, loss=0.014273162465542555\n", - "Surface training t=34381, loss=0.01333967037498951\n", - "Surface training t=34382, loss=0.01440578093752265\n", - "Surface training t=34383, loss=0.012758233118802309\n", - "Surface training t=34384, loss=0.016010764054954052\n", - "Surface training t=34385, loss=0.01936205243691802\n", - "Surface training t=34386, loss=0.02236692700535059\n", - "Surface training t=34387, loss=0.022305598482489586\n", - "Surface training t=34388, loss=0.01812645234167576\n", - "Surface training t=34389, loss=0.025906802155077457\n", - "Surface training t=34390, loss=0.016966570168733597\n", - "Surface training t=34391, loss=0.017766382545232773\n", - "Surface training t=34392, loss=0.028560115955770016\n", - "Surface training t=34393, loss=0.022431187331676483\n", - "Surface training t=34394, loss=0.02247348614037037\n", - "Surface training t=34395, loss=0.018059725873172283\n", - "Surface training t=34396, loss=0.018803873099386692\n", - "Surface training t=34397, loss=0.028069057501852512\n", - "Surface training t=34398, loss=0.022782851941883564\n", - "Surface training t=34399, loss=0.021566515788435936\n", - "Surface training t=34400, loss=0.023832054808735847\n", - "Surface training t=34401, loss=0.02032522391527891\n", - "Surface training t=34402, loss=0.024153157137334347\n", - "Surface training t=34403, loss=0.020540348254144192\n", - "Surface training t=34404, loss=0.01838546246290207\n", - "Surface training t=34405, loss=0.026608348824083805\n", - "Surface training t=34406, loss=0.023429746739566326\n", - "Surface training t=34407, loss=0.01618551230058074\n", - "Surface training t=34408, loss=0.016331826336681843\n", - "Surface training t=34409, loss=0.016666628420352936\n", - "Surface training t=34410, loss=0.012018265202641487\n", - "Surface training t=34411, loss=0.01584492437541485\n", - "Surface training t=34412, loss=0.018773164600133896\n", - "Surface training t=34413, loss=0.013995234854519367\n", - "Surface training t=34414, loss=0.015838684514164925\n", - "Surface training t=34415, loss=0.011914811097085476\n", - "Surface training t=34416, loss=0.017713190987706184\n", - "Surface training t=34417, loss=0.016813830938190222\n", - "Surface training t=34418, loss=0.017310502473264933\n", - "Surface training t=34419, 
loss=0.020492377690970898\n", - "Surface training t=34420, loss=0.021012088283896446\n", - "Surface training t=34421, loss=0.02940718550235033\n", - "Surface training t=34422, loss=0.026696096174418926\n", - "Surface training t=34423, loss=0.021332849748432636\n", - "Surface training t=34424, loss=0.020924540236592293\n", - "Surface training t=34425, loss=0.02023478876799345\n", - "Surface training t=34426, loss=0.01747103128582239\n", - "Surface training t=34427, loss=0.030129048973321915\n", - "Surface training t=34428, loss=0.024493038654327393\n", - "Surface training t=34429, loss=0.02767643705010414\n", - "Surface training t=34430, loss=0.029870244674384594\n", - "Surface training t=34431, loss=0.024554098956286907\n", - "Surface training t=34432, loss=0.03322239965200424\n", - "Surface training t=34433, loss=0.01706588175147772\n", - "Surface training t=34434, loss=0.012599281035363674\n", - "Surface training t=34435, loss=0.01607399992644787\n", - "Surface training t=34436, loss=0.013733586762100458\n", - "Surface training t=34437, loss=0.01474725129082799\n", - "Surface training t=34438, loss=0.016665246337652206\n", - "Surface training t=34439, loss=0.015920435078442097\n", - "Surface training t=34440, loss=0.014360559172928333\n", - "Surface training t=34441, loss=0.020411483943462372\n", - "Surface training t=34442, loss=0.025276804342865944\n", - "Surface training t=34443, loss=0.025160188786685467\n", - "Surface training t=34444, loss=0.01805876661092043\n", - "Surface training t=34445, loss=0.021229072473943233\n", - "Surface training t=34446, loss=0.0187287125736475\n", - "Surface training t=34447, loss=0.017049037851393223\n", - "Surface training t=34448, loss=0.016976994462311268\n", - "Surface training t=34449, loss=0.01562500884756446\n", - "Surface training t=34450, loss=0.016707617789506912\n", - "Surface training t=34451, loss=0.013319301884621382\n", - "Surface training t=34452, loss=0.01834613550454378\n", - "Surface training t=34453, loss=0.011457661632448435\n", - "Surface training t=34454, loss=0.014927594922482967\n", - "Surface training t=34455, loss=0.012915373779833317\n", - "Surface training t=34456, loss=0.019123224541544914\n", - "Surface training t=34457, loss=0.022386894561350346\n", - "Surface training t=34458, loss=0.014942727982997894\n", - "Surface training t=34459, loss=0.0174404950812459\n", - "Surface training t=34460, loss=0.018611645326018333\n", - "Surface training t=34461, loss=0.02232478093355894\n", - "Surface training t=34462, loss=0.019118253141641617\n", - "Surface training t=34463, loss=0.021047329530119896\n", - "Surface training t=34464, loss=0.01998698292300105\n", - "Surface training t=34465, loss=0.022627951577305794\n", - "Surface training t=34466, loss=0.018321430310606956\n", - "Surface training t=34467, loss=0.02161341067403555\n", - "Surface training t=34468, loss=0.021527398377656937\n", - "Surface training t=34469, loss=0.01986122550442815\n", - "Surface training t=34470, loss=0.019587043672800064\n", - "Surface training t=34471, loss=0.013553069438785315\n", - "Surface training t=34472, loss=0.015402314718812704\n", - "Surface training t=34473, loss=0.013725955970585346\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=34474, loss=0.013869647402316332\n", - "Surface training t=34475, loss=0.015569649171084166\n", - "Surface training t=34476, loss=0.01749298255890608\n", - "Surface training t=34477, loss=0.016140829771757126\n", - "Surface training t=34478, 
loss=0.015289805829524994\n", - "Surface training t=34479, loss=0.017187023535370827\n", - "Surface training t=34480, loss=0.0178162707015872\n", - "Surface training t=34481, loss=0.016607508063316345\n", - "Surface training t=34482, loss=0.011926589999347925\n", - "Surface training t=34483, loss=0.02001266460865736\n", - "Surface training t=34484, loss=0.017184000462293625\n", - "Surface training t=34485, loss=0.018349834717810154\n", - "Surface training t=34486, loss=0.019424221478402615\n", - "Surface training t=34487, loss=0.03145209886133671\n", - "Surface training t=34488, loss=0.020672205835580826\n", - "Surface training t=34489, loss=0.024265854619443417\n", - "Surface training t=34490, loss=0.02115636970847845\n", - "Surface training t=34491, loss=0.019695489667356014\n", - "Surface training t=34492, loss=0.01715986616909504\n", - "Surface training t=34493, loss=0.022927550598978996\n", - "Surface training t=34494, loss=0.01825634203851223\n", - "Surface training t=34495, loss=0.01921779103577137\n", - "Surface training t=34496, loss=0.01620945706963539\n", - "Surface training t=34497, loss=0.021142530255019665\n", - "Surface training t=34498, loss=0.02274950034916401\n", - "Surface training t=34499, loss=0.020092913880944252\n", - "Surface training t=34500, loss=0.022704618982970715\n", - "Surface training t=34501, loss=0.023053397424519062\n", - "Surface training t=34502, loss=0.016953233163803816\n", - "Surface training t=34503, loss=0.020408358424901962\n", - "Surface training t=34504, loss=0.016718626022338867\n", - "Surface training t=34505, loss=0.01840299926698208\n", - "Surface training t=34506, loss=0.017193731386214495\n", - "Surface training t=34507, loss=0.015884730499237776\n", - "Surface training t=34508, loss=0.015434322878718376\n", - "Surface training t=34509, loss=0.017592248506844044\n", - "Surface training t=34510, loss=0.017248221673071384\n", - "Surface training t=34511, loss=0.017872276715934277\n", - "Surface training t=34512, loss=0.020725484006106853\n", - "Surface training t=34513, loss=0.017216460313647985\n", - "Surface training t=34514, loss=0.01752778934314847\n", - "Surface training t=34515, loss=0.017110696993768215\n", - "Surface training t=34516, loss=0.019267989322543144\n", - "Surface training t=34517, loss=0.02121692430227995\n", - "Surface training t=34518, loss=0.02073546266183257\n", - "Surface training t=34519, loss=0.020216073840856552\n", - "Surface training t=34520, loss=0.025977790355682373\n", - "Surface training t=34521, loss=0.02217742893844843\n", - "Surface training t=34522, loss=0.020486363675445318\n", - "Surface training t=34523, loss=0.023980013094842434\n", - "Surface training t=34524, loss=0.022818869911134243\n", - "Surface training t=34525, loss=0.02365193236619234\n", - "Surface training t=34526, loss=0.022166620939970016\n", - "Surface training t=34527, loss=0.0250181145966053\n", - "Surface training t=34528, loss=0.024020408280193806\n", - "Surface training t=34529, loss=0.02483164705336094\n", - "Surface training t=34530, loss=0.018135041929781437\n", - "Surface training t=34531, loss=0.02087950985878706\n", - "Surface training t=34532, loss=0.02977157663553953\n", - "Surface training t=34533, loss=0.021826217882335186\n", - "Surface training t=34534, loss=0.026972340419888496\n", - "Surface training t=34535, loss=0.020347364246845245\n", - "Surface training t=34536, loss=0.025944851338863373\n", - "Surface training t=34537, loss=0.01829792745411396\n", - "Surface training t=34538, loss=0.01885658409446478\n", - 
"Surface training t=34539, loss=0.018624790012836456\n", - "Surface training t=34540, loss=0.01458226004615426\n", - "Surface training t=34541, loss=0.0194504177197814\n", - "Surface training t=34542, loss=0.01798576395958662\n", - "Surface training t=34543, loss=0.020371776074171066\n", - "Surface training t=34544, loss=0.018592690583318472\n", - "Surface training t=34545, loss=0.020735392346978188\n", - "Surface training t=34546, loss=0.021665912121534348\n", - "Surface training t=34547, loss=0.02257402241230011\n", - "Surface training t=34548, loss=0.02815026417374611\n", - "Surface training t=34549, loss=0.03181109577417374\n", - "Surface training t=34550, loss=0.021619943901896477\n", - "Surface training t=34551, loss=0.021401094272732735\n", - "Surface training t=34552, loss=0.013452146202325821\n", - "Surface training t=34553, loss=0.02033290732651949\n", - "Surface training t=34554, loss=0.018943535163998604\n", - "Surface training t=34555, loss=0.019224824383854866\n", - "Surface training t=34556, loss=0.01650784071534872\n", - "Surface training t=34557, loss=0.01441464526578784\n", - "Surface training t=34558, loss=0.017026078887283802\n", - "Surface training t=34559, loss=0.017627092078328133\n", - "Surface training t=34560, loss=0.015507693402469158\n", - "Surface training t=34561, loss=0.014271079562604427\n", - "Surface training t=34562, loss=0.017917374148964882\n", - "Surface training t=34563, loss=0.013978204224258661\n", - "Surface training t=34564, loss=0.015004675835371017\n", - "Surface training t=34565, loss=0.017282925080507994\n", - "Surface training t=34566, loss=0.01333639770746231\n", - "Surface training t=34567, loss=0.01281007332727313\n", - "Surface training t=34568, loss=0.012120299972593784\n", - "Surface training t=34569, loss=0.016445976682007313\n", - "Surface training t=34570, loss=0.011679815594106913\n", - "Surface training t=34571, loss=0.01604748610407114\n", - "Surface training t=34572, loss=0.01296300534158945\n", - "Surface training t=34573, loss=0.014550656080245972\n", - "Surface training t=34574, loss=0.010948569979518652\n", - "Surface training t=34575, loss=0.013971386011689901\n", - "Surface training t=34576, loss=0.01643161801621318\n", - "Surface training t=34577, loss=0.021798860281705856\n", - "Surface training t=34578, loss=0.020126007962971926\n", - "Surface training t=34579, loss=0.024844381026923656\n", - "Surface training t=34580, loss=0.02367325872182846\n", - "Surface training t=34581, loss=0.032067738473415375\n", - "Surface training t=34582, loss=0.02677349280565977\n", - "Surface training t=34583, loss=0.025586777366697788\n", - "Surface training t=34584, loss=0.024798192083835602\n", - "Surface training t=34585, loss=0.02673314232379198\n", - "Surface training t=34586, loss=0.02792196534574032\n", - "Surface training t=34587, loss=0.018019800540059805\n", - "Surface training t=34588, loss=0.032240135595202446\n", - "Surface training t=34589, loss=0.026313416659832\n", - "Surface training t=34590, loss=0.027951800264418125\n", - "Surface training t=34591, loss=0.01895781233906746\n", - "Surface training t=34592, loss=0.013344605453312397\n", - "Surface training t=34593, loss=0.016080768313258886\n", - "Surface training t=34594, loss=0.01518542179837823\n", - "Surface training t=34595, loss=0.022558841854333878\n", - "Surface training t=34596, loss=0.02068316377699375\n", - "Surface training t=34597, loss=0.024964408949017525\n", - "Surface training t=34598, loss=0.024952154606580734\n", - "Surface training t=34599, 
loss=0.018895119428634644\n", - "Surface training t=34600, loss=0.01938935648649931\n", - "Surface training t=34601, loss=0.017079665791243315\n", - "Surface training t=34602, loss=0.01595912780612707\n", - "Surface training t=34603, loss=0.012221353594213724\n", - "Surface training t=34604, loss=0.015994082670658827\n", - "Surface training t=34605, loss=0.017335542012006044\n", - "Surface training t=34606, loss=0.018139651976525784\n", - "Surface training t=34607, loss=0.013492506928741932\n", - "Surface training t=34608, loss=0.016849488019943237\n", - "Surface training t=34609, loss=0.016334827058017254\n", - "Surface training t=34610, loss=0.015250862576067448\n", - "Surface training t=34611, loss=0.015795189421623945\n", - "Surface training t=34612, loss=0.0139733268879354\n", - "Surface training t=34613, loss=0.014052562415599823\n", - "Surface training t=34614, loss=0.01833695825189352\n", - "Surface training t=34615, loss=0.020280794240534306\n", - "Surface training t=34616, loss=0.016348015516996384\n", - "Surface training t=34617, loss=0.019153531175106764\n", - "Surface training t=34618, loss=0.021637135185301304\n", - "Surface training t=34619, loss=0.025052612647414207\n", - "Surface training t=34620, loss=0.02029071655124426\n", - "Surface training t=34621, loss=0.026354688219726086\n", - "Surface training t=34622, loss=0.028193820267915726\n", - "Surface training t=34623, loss=0.01435898244380951\n", - "Surface training t=34624, loss=0.01914292573928833\n", - "Surface training t=34625, loss=0.021916847676038742\n", - "Surface training t=34626, loss=0.02068578079342842\n", - "Surface training t=34627, loss=0.018939722329378128\n", - "Surface training t=34628, loss=0.02123998012393713\n", - "Surface training t=34629, loss=0.017752202227711678\n", - "Surface training t=34630, loss=0.01657275902107358\n", - "Surface training t=34631, loss=0.016391875222325325\n", - "Surface training t=34632, loss=0.01889688428491354\n", - "Surface training t=34633, loss=0.01739432755857706\n", - "Surface training t=34634, loss=0.020475552417337894\n", - "Surface training t=34635, loss=0.013058794662356377\n", - "Surface training t=34636, loss=0.01965885329991579\n", - "Surface training t=34637, loss=0.021751313470304012\n", - "Surface training t=34638, loss=0.016739755868911743\n", - "Surface training t=34639, loss=0.022306805476546288\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=34640, loss=0.02670437004417181\n", - "Surface training t=34641, loss=0.02592182345688343\n", - "Surface training t=34642, loss=0.01766217779368162\n", - "Surface training t=34643, loss=0.01647672150284052\n", - "Surface training t=34644, loss=0.017342657782137394\n", - "Surface training t=34645, loss=0.016888443380594254\n", - "Surface training t=34646, loss=0.01902626547962427\n", - "Surface training t=34647, loss=0.021278519183397293\n", - "Surface training t=34648, loss=0.020899550057947636\n", - "Surface training t=34649, loss=0.02236087527126074\n", - "Surface training t=34650, loss=0.023427557200193405\n", - "Surface training t=34651, loss=0.024634527042508125\n", - "Surface training t=34652, loss=0.02276471257209778\n", - "Surface training t=34653, loss=0.025206537917256355\n", - "Surface training t=34654, loss=0.022291592322289944\n", - "Surface training t=34655, loss=0.022113224491477013\n", - "Surface training t=34656, loss=0.02107603568583727\n", - "Surface training t=34657, loss=0.023097416386008263\n", - "Surface training t=34658, 
loss=0.017855998128652573\n", - "Surface training t=34659, loss=0.021930625662207603\n", - "Surface training t=34660, loss=0.01901340950280428\n", - "Surface training t=34661, loss=0.02012214669957757\n", - "Surface training t=34662, loss=0.01634560152888298\n", - "Surface training t=34663, loss=0.025629130192101002\n", - "Surface training t=34664, loss=0.019657340832054615\n", - "Surface training t=34665, loss=0.01702371332794428\n", - "Surface training t=34666, loss=0.019430329091846943\n", - "Surface training t=34667, loss=0.020847653038799763\n", - "Surface training t=34668, loss=0.014984112698584795\n", - "Surface training t=34669, loss=0.01764107681810856\n", - "Surface training t=34670, loss=0.018107435666024685\n", - "Surface training t=34671, loss=0.0195637047290802\n", - "Surface training t=34672, loss=0.020192111376672983\n", - "Surface training t=34673, loss=0.022491078823804855\n", - "Surface training t=34674, loss=0.023348144255578518\n", - "Surface training t=34675, loss=0.020877785980701447\n", - "Surface training t=34676, loss=0.018019639421254396\n", - "Surface training t=34677, loss=0.015227561350911856\n", - "Surface training t=34678, loss=0.02009679563343525\n", - "Surface training t=34679, loss=0.018699809908866882\n", - "Surface training t=34680, loss=0.017485685646533966\n", - "Surface training t=34681, loss=0.01820471603423357\n", - "Surface training t=34682, loss=0.02181190811097622\n", - "Surface training t=34683, loss=0.01967803854495287\n", - "Surface training t=34684, loss=0.03981282003223896\n", - "Surface training t=34685, loss=0.03288835473358631\n", - "Surface training t=34686, loss=0.039884088560938835\n", - "Surface training t=34687, loss=0.033237216994166374\n", - "Surface training t=34688, loss=0.036988476291298866\n", - "Surface training t=34689, loss=0.024020216427743435\n", - "Surface training t=34690, loss=0.027081750333309174\n", - "Surface training t=34691, loss=0.03203068673610687\n", - "Surface training t=34692, loss=0.026685486547648907\n", - "Surface training t=34693, loss=0.03020607866346836\n", - "Surface training t=34694, loss=0.02035706490278244\n", - "Surface training t=34695, loss=0.023681730963289738\n", - "Surface training t=34696, loss=0.025479079224169254\n", - "Surface training t=34697, loss=0.021302073262631893\n", - "Surface training t=34698, loss=0.02014964632689953\n", - "Surface training t=34699, loss=0.024544970132410526\n", - "Surface training t=34700, loss=0.02194041758775711\n", - "Surface training t=34701, loss=0.019681585021317005\n", - "Surface training t=34702, loss=0.01768049318343401\n", - "Surface training t=34703, loss=0.016701129265129566\n", - "Surface training t=34704, loss=0.020533524453639984\n", - "Surface training t=34705, loss=0.02102417405694723\n", - "Surface training t=34706, loss=0.016636420972645283\n", - "Surface training t=34707, loss=0.014048694632947445\n", - "Surface training t=34708, loss=0.0160637809894979\n", - "Surface training t=34709, loss=0.0204179547727108\n", - "Surface training t=34710, loss=0.01831031357869506\n", - "Surface training t=34711, loss=0.026013446040451527\n", - "Surface training t=34712, loss=0.028452463448047638\n", - "Surface training t=34713, loss=0.02361073810607195\n", - "Surface training t=34714, loss=0.03442319482564926\n", - "Surface training t=34715, loss=0.02253655530512333\n", - "Surface training t=34716, loss=0.017612903378903866\n", - "Surface training t=34717, loss=0.021396014839410782\n", - "Surface training t=34718, loss=0.016939330846071243\n", - "Surface 
training t=34719, loss=0.017997180111706257\n", - "Surface training t=34720, loss=0.02388626802712679\n", - "Surface training t=34721, loss=0.01346151065081358\n", - "Surface training t=34722, loss=0.021494065411388874\n", - "Surface training t=34723, loss=0.019882116466760635\n", - "Surface training t=34724, loss=0.016355253756046295\n", - "Surface training t=34725, loss=0.01676286105066538\n", - "Surface training t=34726, loss=0.0150191611610353\n", - "Surface training t=34727, loss=0.012026547454297543\n", - "Surface training t=34728, loss=0.0215064762160182\n", - "Surface training t=34729, loss=0.019956136122345924\n", - "Surface training t=34730, loss=0.017062275670468807\n", - "Surface training t=34731, loss=0.018849891610443592\n", - "Surface training t=34732, loss=0.024210317060351372\n", - "Surface training t=34733, loss=0.021364516578614712\n", - "Surface training t=34734, loss=0.022302682511508465\n", - "Surface training t=34735, loss=0.021189013496041298\n", - "Surface training t=34736, loss=0.029937097802758217\n", - "Surface training t=34737, loss=0.026991155929863453\n", - "Surface training t=34738, loss=0.027540245093405247\n", - "Surface training t=34739, loss=0.0390955600887537\n", - "Surface training t=34740, loss=0.032434580847620964\n", - "Surface training t=34741, loss=0.027878996916115284\n", - "Surface training t=34742, loss=0.02458254899829626\n", - "Surface training t=34743, loss=0.022835384123027325\n", - "Surface training t=34744, loss=0.028033350594341755\n", - "Surface training t=34745, loss=0.022333715576678514\n", - "Surface training t=34746, loss=0.033045087940990925\n", - "Surface training t=34747, loss=0.033940703608095646\n", - "Surface training t=34748, loss=0.024181711487472057\n", - "Surface training t=34749, loss=0.020388627890497446\n", - "Surface training t=34750, loss=0.02109778020530939\n", - "Surface training t=34751, loss=0.025021099485456944\n", - "Surface training t=34752, loss=0.02176574617624283\n", - "Surface training t=34753, loss=0.02595026884227991\n", - "Surface training t=34754, loss=0.018347712233662605\n", - "Surface training t=34755, loss=0.017918448895215988\n", - "Surface training t=34756, loss=0.019120446406304836\n", - "Surface training t=34757, loss=0.017949257045984268\n", - "Surface training t=34758, loss=0.015040923841297626\n", - "Surface training t=34759, loss=0.02107108384370804\n", - "Surface training t=34760, loss=0.01902313344180584\n", - "Surface training t=34761, loss=0.020692845806479454\n", - "Surface training t=34762, loss=0.01613552402704954\n", - "Surface training t=34763, loss=0.01595690194517374\n", - "Surface training t=34764, loss=0.014164620079100132\n", - "Surface training t=34765, loss=0.019396232441067696\n", - "Surface training t=34766, loss=0.014317998196929693\n", - "Surface training t=34767, loss=0.018075804226100445\n", - "Surface training t=34768, loss=0.02252427488565445\n", - "Surface training t=34769, loss=0.019050419330596924\n", - "Surface training t=34770, loss=0.0195014625787735\n", - "Surface training t=34771, loss=0.03026948869228363\n", - "Surface training t=34772, loss=0.02109114918857813\n", - "Surface training t=34773, loss=0.031699489802122116\n", - "Surface training t=34774, loss=0.02252760250121355\n", - "Surface training t=34775, loss=0.01933224219828844\n", - "Surface training t=34776, loss=0.021271858364343643\n", - "Surface training t=34777, loss=0.021241863258183002\n", - "Surface training t=34778, loss=0.02159243542701006\n", - "Surface training t=34779, 
loss=0.018400960601866245\n", - "Surface training t=34780, loss=0.02305808011442423\n", - "Surface training t=34781, loss=0.027477937750518322\n", - "Surface training t=34782, loss=0.021660836413502693\n", - "Surface training t=34783, loss=0.02945487666875124\n", - "Surface training t=34784, loss=0.0199508061632514\n", - "Surface training t=34785, loss=0.018182051833719015\n", - "Surface training t=34786, loss=0.018946579657495022\n", - "Surface training t=34787, loss=0.023377404548227787\n", - "Surface training t=34788, loss=0.029855611734092236\n", - "Surface training t=34789, loss=0.024717964231967926\n", - "Surface training t=34790, loss=0.024733704514801502\n", - "Surface training t=34791, loss=0.021980736404657364\n", - "Surface training t=34792, loss=0.02718358486890793\n", - "Surface training t=34793, loss=0.02228925283998251\n", - "Surface training t=34794, loss=0.024554332718253136\n", - "Surface training t=34795, loss=0.030195255763828754\n", - "Surface training t=34796, loss=0.0271902815438807\n", - "Surface training t=34797, loss=0.03515850193798542\n", - "Surface training t=34798, loss=0.028261876665055752\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Surface training t=34799, loss=0.026117272675037384\n", - "Surface training t=34800, loss=0.02433091588318348\n", - "Surface training t=34801, loss=0.019477914087474346\n", - "Surface training t=34802, loss=0.02539461199194193\n", - "Surface training t=34803, loss=0.024719344452023506\n", - "Surface training t=34804, loss=0.029927401803433895\n", - "Surface training t=34805, loss=0.02640475705265999\n", - "Surface training t=34806, loss=0.0248259324580431\n", - "Surface training t=34807, loss=0.0335962176322937\n", - "Surface training t=34808, loss=0.027604208327829838\n", - "Surface training t=34809, loss=0.020849178545176983\n", - "Surface training t=34810, loss=0.017007664777338505\n", - "Surface training t=34811, loss=0.016392712481319904\n", - "Surface training t=34812, loss=0.019100003875792027\n", - "Surface training t=34813, loss=0.01667583966627717\n", - "Surface training t=34814, loss=0.013074218295514584\n", - "Surface training t=34815, loss=0.017193845473229885\n", - "Surface training t=34816, loss=0.020351399667561054\n", - "Surface training t=34817, loss=0.026148061268031597\n", - "Surface training t=34818, loss=0.020209407433867455\n", - "Surface training t=34819, loss=0.020887858234345913\n", - "Surface training t=34820, loss=0.024946855381131172\n", - "Surface training t=34821, loss=0.01661391742527485\n", - "Surface training t=34822, loss=0.022057742811739445\n", - "Surface training t=34823, loss=0.0176599295809865\n", - "Surface training t=34824, loss=0.020229910500347614\n", - "Surface training t=34825, loss=0.018029040656983852\n", - "Surface training t=34826, loss=0.020440176129341125\n", - "Surface training t=34827, loss=0.021527480334043503\n", - "Surface training t=34828, loss=0.01585905533283949\n", - "Surface training t=34829, loss=0.018570474348962307\n", - "Surface training t=34830, loss=0.02239773515611887\n", - "Surface training t=34831, loss=0.01987755997106433\n", - "Surface training t=34832, loss=0.020827862434089184\n", - "Surface training t=34833, loss=0.018411780707538128\n", - "Surface training t=34834, loss=0.02025254536420107\n", - "Surface training t=34835, loss=0.023701020516455173\n", - "Surface training t=34836, loss=0.02269093319773674\n", - "Surface training t=34837, loss=0.01464746380224824\n", - "Surface training t=34838, 
loss=0.016715017147362232\n", - "Surface training t=34839, loss=0.01519046537578106\n", - "Surface training t=34840, loss=0.011867452412843704\n", - "Surface training t=34841, loss=0.011636168230324984\n", - "Surface training t=34842, loss=0.016591941937804222\n", - "Surface training t=34843, loss=0.017768999561667442\n", - "Surface training t=34844, loss=0.019599519670009613\n", - "Surface training t=34845, loss=0.022462508641183376\n", - "Surface training t=34846, loss=0.021687778644263744\n", - "Surface training t=34847, loss=0.021037409082055092\n", - "Surface training t=34848, loss=0.02439987752586603\n", - "Surface training t=34849, loss=0.020578342489898205\n", - "Surface training t=34850, loss=0.021962212398648262\n", - "Surface training t=34851, loss=0.02482516411691904\n", - "Surface training t=34852, loss=0.022764362394809723\n", - "Surface training t=34853, loss=0.020060501992702484\n", - "Surface training t=34854, loss=0.021213039755821228\n", - "Surface training t=34855, loss=0.02755255252122879\n", - "Surface training t=34856, loss=0.025652037002146244\n", - "Surface training t=34857, loss=0.022367196157574654\n", - "Surface training t=34858, loss=0.02783518936485052\n", - "Surface training t=34859, loss=0.026177968829870224\n", - "Surface training t=34860, loss=0.025765015743672848\n", - "Surface training t=34861, loss=0.01916061621159315\n", - "Surface training t=34862, loss=0.02376366127282381\n", - "Surface training t=34863, loss=0.023841118440032005\n", - "Surface training t=34864, loss=0.02124106790870428\n", - "Surface training t=34865, loss=0.022221134044229984\n", - "Surface training t=34866, loss=0.023387539200484753\n", - "Surface training t=34867, loss=0.022847881074994802\n", - "Surface training t=34868, loss=0.035439975559711456\n", - "Surface training t=34869, loss=0.030222492292523384\n", - "Surface training t=34870, loss=0.02362515637651086\n", - "Surface training t=34871, loss=0.04176817834377289\n", - "Surface training t=34872, loss=0.029200782999396324\n", - "Surface training t=34873, loss=0.03440520726144314\n", - "Surface training t=34874, loss=0.03161246422678232\n", - "Surface training t=34875, loss=0.029994556680321693\n", - "Surface training t=34876, loss=0.027251560240983963\n", - "Surface training t=34877, loss=0.026241346262395382\n", - "Surface training t=34878, loss=0.024917080998420715\n", - "Surface training t=34879, loss=0.01993947010487318\n", - "Surface training t=34880, loss=0.0188433974981308\n", - "Surface training t=34881, loss=0.017753089778125286\n", - "Surface training t=34882, loss=0.020525083877146244\n", - "Surface training t=34883, loss=0.015861323568969965\n", - "Surface training t=34884, loss=0.017822980880737305\n", - "Surface training t=34885, loss=0.016211694106459618\n", - "Surface training t=34886, loss=0.016968607902526855\n", - "Surface training t=34887, loss=0.015408390201628208\n", - "Surface training t=34888, loss=0.019920732360333204\n", - "Surface training t=34889, loss=0.02389075979590416\n", - "Surface training t=34890, loss=0.01864552777260542\n", - "Surface training t=34891, loss=0.022887198254466057\n", - "Surface training t=34892, loss=0.026346709579229355\n", - "Surface training t=34893, loss=0.01846855878829956\n", - "Surface training t=34894, loss=0.017730477266013622\n", - "Surface training t=34895, loss=0.02771679125726223\n", - "Surface training t=34896, loss=0.02473179902881384\n", - "Surface training t=34897, loss=0.0231174873188138\n", - "Surface training t=34898, loss=0.026910274289548397\n", - 
"Surface training t=34899, loss=0.023727894760668278\n", - "Surface training t=34900, loss=0.018904478289186954\n", - "Surface training t=34901, loss=0.024552345275878906\n", - "Surface training t=34902, loss=0.019812763668596745\n", - "Surface training t=34903, loss=0.018290837295353413\n", - "Surface training t=34904, loss=0.024894464761018753\n", - "Surface training t=34905, loss=0.02104884944856167\n", - "Surface training t=34906, loss=0.02574630081653595\n", - "Surface training t=34907, loss=0.038464490324258804\n", - "Surface training t=34908, loss=0.023784141056239605\n", - "Surface training t=34909, loss=0.02195094246417284\n", - "Surface training t=34910, loss=0.020893988199532032\n", - "Surface training t=34911, loss=0.020913940854370594\n", - "Surface training t=34912, loss=0.026544984430074692\n", - "Surface training t=34913, loss=0.021656086668372154\n", - "Surface training t=34914, loss=0.020359653048217297\n", - "Surface training t=34915, loss=0.03135811723768711\n", - "Surface training t=34916, loss=0.020910675637423992\n", - "Surface training t=34917, loss=0.017749878577888012\n", - "Surface training t=34918, loss=0.020222026854753494\n", - "Surface training t=34919, loss=0.02393217198550701\n", - "Surface training t=34920, loss=0.016548813320696354\n", - "Surface training t=34921, loss=0.019879535771906376\n", - "Surface training t=34922, loss=0.01920226402580738\n", - "Surface training t=34923, loss=0.018716405145823956\n", - "Surface training t=34924, loss=0.023366253823041916\n", - "Surface training t=34925, loss=0.019327973946928978\n", - "Surface training t=34926, loss=0.017683050595223904\n", - "Surface training t=34927, loss=0.020316983573138714\n", - "Surface training t=34928, loss=0.020344726741313934\n", - "Surface training t=34929, loss=0.026577656157314777\n", - "Surface training t=34930, loss=0.02179804816842079\n", - "Surface training t=34931, loss=0.020178446546196938\n", - "Surface training t=34932, loss=0.02301362156867981\n", - "Surface training t=34933, loss=0.019782649353146553\n", - "Surface training t=34934, loss=0.013613355346024036\n", - "Surface training t=34935, loss=0.0259089358150959\n", - "Surface training t=34936, loss=0.030492007732391357\n", - "Surface training t=34937, loss=0.02249159663915634\n", - "Surface training t=34938, loss=0.030717474408447742\n", - "Surface training t=34939, loss=0.027749376371502876\n", - "Surface training t=34940, loss=0.03235188499093056\n", - "Surface training t=34941, loss=0.0234224796295166\n", - "Surface training t=34942, loss=0.018969848286360502\n", - "Surface training t=34943, loss=0.018471150659024715\n", - "Surface training t=34944, loss=0.028222582302987576\n", - "Surface training t=34945, loss=0.025806882418692112\n", - "Surface training t=34946, loss=0.019465764053165913\n", - "Surface training t=34947, loss=0.01811573188751936\n", - "Surface training t=34948, loss=0.014019811991602182\n", - "Surface training t=34949, loss=0.017545283772051334\n", - "Surface training t=34950, loss=0.021025624126195908\n", - "Surface training t=34951, loss=0.0252420655451715\n", - "Surface training t=34952, loss=0.02560752909630537\n", - "Surface training t=34953, loss=0.027730890549719334\n", - "Surface training t=34954, loss=0.03186838701367378\n", - "Surface training t=34955, loss=0.018848947249352932\n", - "Surface training t=34956, loss=0.02562570385634899\n", - "Surface training t=34957, loss=0.02473611757159233\n", - "Surface training t=34958, loss=0.02060551755130291\n", - "Surface training t=34959, 
loss=0.019661255180835724\n", - "Surface training t=34960, loss=0.020308485254645348\n", - "Surface training t=34961, loss=0.023836958222091198\n", - "Surface training t=34962, loss=0.022735705599188805\n", - "Surface training t=34963, loss=0.01930199656635523\n", - "Surface training t=34964, loss=0.020365570671856403\n", - "Surface training t=34965, loss=0.016280648298561573\n", - "Surface training t=34966, loss=0.017339137848466635\n", - "Surface training t=34967, loss=0.024161885492503643\n", - "Surface training t=34968, loss=0.02390156500041485\n", - "Surface training t=34969, loss=0.021423902362585068\n", - "Surface training t=34970, loss=0.02267182432115078\n", - "Surface training t=34971, loss=0.027643512934446335\n", - "Surface training t=34972, loss=0.022161363624036312\n", - "Surface training t=34973, loss=0.02699727565050125\n", - "Surface training t=34974, loss=0.026327064260840416\n", - "Surface training t=34975, loss=0.025325514376163483\n", - "Surface training t=34976, loss=0.037122342735528946\n", - "Surface training t=34977, loss=0.02853887900710106\n", - "Surface training t=34978, loss=0.023209617473185062\n", - "Surface training t=34979, loss=0.01999070681631565\n", - "Surface training t=34980, loss=0.025170637294650078\n", - "Surface training t=34981, loss=0.023379364982247353\n", - "Surface training t=34982, loss=0.0173968356102705\n", - "Surface training t=34983, loss=0.01793838944286108\n", - "Surface training t=34984, loss=0.03014253731817007\n", - "Surface training t=34985, loss=0.022308599203824997\n", - "Surface training t=34986, loss=0.023372254334390163\n", - "Surface training t=34987, loss=0.02462085708975792\n", - "Surface training t=34988, loss=0.022074414417147636\n", - "Surface training t=34989, loss=0.01872312417253852\n", - "Surface training t=34990, loss=0.021898345090448856\n", - "Surface training t=34991, loss=0.015109047759324312\n", - "Surface training t=34992, loss=0.01684784423559904\n", - "Surface training t=34993, loss=0.01605638675391674\n", - "Surface training t=34994, loss=0.018933079205453396\n", - "Surface training t=34995, loss=0.017416946589946747\n", - "Surface training t=34996, loss=0.013738160021603107\n", - "Surface training t=34997, loss=0.019315749406814575\n", - "Surface training t=34998, loss=0.019791975151747465\n", - "Surface training t=34999, loss=0.023263991810381413\n", - "initial_shape (320,) derivs_tensor.shape (320, 2)\n", - "self.tokens is ['u', 'du/dx0', 'd^2u/dx0^2']\n", - "Here, derivs order is {'u': [None], 'du/dx0': [0], 'd^2u/dx0^2': [0, 0]}\n", - "The cardinality of defined token pool is [3]\n", - "Among them, the pool contains [3]\n", - "Creating new equation, sparsity value [2.29105278e-05]\n", - "New solution accepted, confirmed 1/12 solutions.\n", - "Creating new equation, sparsity value [1.63536929e-06]\n", - "New solution accepted, confirmed 2/12 solutions.\n", - "Creating new equation, sparsity value [8.9282741e-08]\n", - "New solution accepted, confirmed 3/12 solutions.\n", - "Creating new equation, sparsity value [4.48556493e-11]\n", - "New solution accepted, confirmed 4/12 solutions.\n", - "Creating new equation, sparsity value [4.24010872e-07]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "New solution accepted, confirmed 5/12 solutions.\n", - "Creating new equation, sparsity value [0.00443635]\n", - "New solution accepted, confirmed 6/12 solutions.\n", - "Creating new equation, sparsity value [2.60449761e-05]\n", - "New solution accepted, confirmed 7/12 solutions.\n", - 
"Creating new equation, sparsity value [0.00421066]\n", - "New solution accepted, confirmed 8/12 solutions.\n", - "Creating new equation, sparsity value [0.4397165]\n", - "New solution accepted, confirmed 9/12 solutions.\n", - "Creating new equation, sparsity value [4.93953099e-12]\n", - "New solution accepted, confirmed 10/12 solutions.\n", - "Creating new equation, sparsity value [1.37251881e-09]\n", - "New solution accepted, confirmed 11/12 solutions.\n", - "Creating new equation, sparsity value [2.5158653e-05]\n", - "New solution accepted, confirmed 12/12 solutions.\n", - "[0.16, 0.84] [[0.42, 0.5800000000000001], [0.26, 0.74], [0.76, 0.24], [0.5, 0.5], [0.32, 0.6799999999999999], [0.54, 0.45999999999999996], [0.16, 0.84]]\n", - "best_obj 2\n", - "Multiobjective optimization : 0-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 1-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 2-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 3-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 4-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - 
"During MO : processing 11-th weight.\n", - "Multiobjective optimization : 5-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 6-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 7-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 8-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 9-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 10-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 11-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : 
processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 12-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 13-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 14-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 15-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 16-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 17-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - 
"During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 18-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 19-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 20-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 21-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 22-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 23-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : 
processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 24-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 25-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 26-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 27-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 28-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 29-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th 
weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 30-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 31-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 32-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 33-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 34-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 35-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : 
processing 11-th weight.\n", - "Multiobjective optimization : 36-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 37-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 38-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 39-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 40-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 41-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 42-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : 
processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 43-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 44-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 45-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 46-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 47-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 48-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th 
weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 49-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 50-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 51-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 52-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 53-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 54-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : 
processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 55-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 56-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 57-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 58-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 59-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 60-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th 
weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 61-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 62-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 63-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 64-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 65-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 66-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 67-th 
epoch.\n", - "During MO : processing 0-th weight.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 68-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 69-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 70-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 71-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 72-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 73-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : 
processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 74-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 75-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 76-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 77-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 78-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 79-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th 
[... deleted notebook stream outputs elided: repeated "Multiobjective optimization : N-th epoch." and "During MO : processing 0-th ... 11-th weight." progress messages ...]
-   ]
-  },
+    "    epde_search_obj.set_preprocessor(default_preprocessor_type='poly',\n",
+    "                                     preprocessor_kwargs={'use_smoothing' : True, 'sigma' : 1, \n",
+    "                                                          'polynomial_window' : 15, 'poly_order' : 12})\n",
+    "    popsize = 12\n",
+    "    epde_search_obj.set_moeadd_params(population_size = popsize, training_epochs=125)\n",
+    "    \n",
+    "    factors_max_number = {'factors_num' : [1, 2, 3], 'probas' : [0.4, 0.3, 0.3]}\n",
+    "    \n",
+    "    epde_search_obj.fit(data=[x,], variable_names=['u',], max_deriv_order=(3,),\n",
+    "                        equation_terms_max_number=6, data_fun_pow = 2,\n",
+    "                        equation_factors_max_number=factors_max_number,\n",
+    "                        eq_sparsity_interval=(1e-13, 1e0))\n",
+    "    \n",
+    "    epde_search_obj.equations(only_print = True, num = 1)\n",
+    "    \n",
+    "    return epde_search_obj"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f0f50bff",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "[... base64-encoded PNG of a matplotlib figure elided ...]",
qY2SSKR5BAcHe1Sqy9/+20PERGxJrYoPT171sXJyZ7bt4M5duy2cQfRlU3vXQ7hz8RykTJQO03V6cWDcPfqmxmrIIqLF51YGTZsGMOGDcPLy4vhw4e/dB+1Wo2fnx/Dhw9n+PDhlC5dmu7duyttatawea4IA9VuA+dTxq3XbZexKImNFl0PQVQYKYiPzwHCwqIpXTo/PXvWUfRcSrJmzUl2776Mra018+b1l1OiJRJJltKrV11Kl85PWFg0Pj4HTG1OOlxcHOjcWYxwWb/eyI67lRuKApKEODH8V8cL847+wFxRXLxMmzaNYcOGpb729vZm4cJXZzIHBgai1Wrx9/dPt79ZExOpb6/cbij8m+JR6TA6432ObhAjBAp5QtUmipn27Flk6j/h55+3w8bGMidGP3sWyXffbQHgs8/aUqVKERNbJJFIcho2NtZ89pnwQixZcoTo6HgTW5Sebt1qA7B9+wXi4hIzfwCVSu992fWnuOEGES0oWEq/3Z6l4gbbDFFUvAQEBKBWq3Fzc3vhPT8/PyVPbRr+/RtiIqBoeVEmHaUWM4xqt854H13IqOUgRXNd5szxIzo6nurVi9GuXTXFzqM0X3+9CbU6hsqVizBiRDNTmyORvJUEB0fw7bebzXIWUFbx3nvVKVUqL2p1DKtX/2dqc9JRt24pChd2IzIyDj+/K8YdpEFnMd/oyV19Xxdr6/S5LzERcGD1G9urBIqLF0O4ubmhVqtfuu/atWvx9fVl4cKFTJo0KcPt4uPjiYiISPcwCRoNbJotljuP03fJbT9SZHYb4lGgSNRVqaDVQMVMCwuLZsUK8c83aVI7iy2N3r//Ghs3nsXKSsWsWT0t1nskkVgyyckaunWbh4/PAaZN225qcxTD2tqKESPERGcfn4PGTXRWCCsrK7p08QJgwwYjS7pzOUE54cHhcpquve/2S7/dnmXGHV9hTFJt5OHhQWhoaIbve3l54e3tTffu3Rk2bBilS5emR48eBredNm0arq6uqY9ixYopZfbLOblDDGB0doMiZcH/vKgeavVBxvvsWSqea7SA/MUVM2358mPExiZQuXIRmjYtr9h5lCQmJoHPPxdJ20OGNKZGDeV+XxKJJGOsra2YNKktAAsW7Gfnzksmtkg5unevTd68zjx4EMaWLedMbU46dKGjffuuolbHGHeQKqJvDJcO6dflKSSaq+q4eUpMnDYzTCJeXiZcADw9PfH01Ce49uzZE19fX4PemsmTJxMeHp76CAoKympzXw9dyVmbD0X4CETjn9wehrfXaPTi5WUC5w2Jj09KnYUxYkQzi/W6+PgcICgolMKF3Zg4sZ2pzZFI3mrat6/BsGFNAfjoo1XcufPMxBYpQ65cdnz4ochFnD9/n3GlyQpRoUIhKlUqTEJCMtu3XzDuILo8y7TiBcTIAB0JcRBw0bjjK4ii4iWtAEmLWq3O8D3ghbJoXc6MoTCUvb09uXPnTvfIdu5cgXN+IjzUpAccXifWP5+5nZaLB8UET8fc0LCLYqZt2nSW4OBIChVypUOHGoqdR0mePo1k/nxRufXllx1wcpJddCUSU/Pllx2oXbskERFxDB36t9k1dMsqBgxoiJOTPdeuPeLgwRumNicdutDRG1UdqVRw/6YYHKyjYv302904aaSFyqG4eHFzczMoOry9vQ3uo1ar6dGjR7p9dB6Xlwkek6LLdWnQGc7uES62ivWgrFfG++gqkZr2Et0NFUCr1bJw4QEAPvigMXZ2NoqcR2l+/nkX0dHx1KhRnE6dapjaHIlEAtjaWvPHHwPx8HDiypUH/PLLblObpAhubo707l0XECF4c6JzZy9UKhX//efPo0fqzB/AxR1Kpcy2u5Qm76XSc+Ll+gmjbVQKxcNGkydPTldZ5Ovrm670OSAgIF3jOjc3NyZOnJhOqCxcuJDu3bsbrFoyORGhotEPQMcx+rr4l5VHR0fA4RTvUmvlQkaHD9/k2rVHODra0a9f/VfvYIbcuvWElStFsvGUKR2xyij5WSKRZDuFC7sxc2ZPAObN28u5c+Y/jdgY3n9ffH/u2XOFp08jTWyNniJF3PHyEvl/+/ZdN+4ghkJHntVFzqaOt1G8TJw4EbVaja+vL76+vpw6dQofH5/U9/38/NK9BiF4Zs6cmfoICQlh3bp1SptqHDsXQXysGCkeFQbP7oNrPhE+yohD68Q+RcsLD41CrFolPnA9e9bFzU0Z747STJu2neRkDa1bV6FevdKmNkcikTxHu3bV6NLFC41Gy/jxq4iNNe+BfsZQoUIhvLxKkJSkYd26U6Y2Jx0tWlQCROKuUejES9qKIxtbfSUSiCnUUWrjjq8QKq05ZSBlAREREbi6uhIeHq58/ktyEgzwhKdB8OkS8Fsuuur2ngyDp2a830eN4MpRGDwNen+uiGnh4bHUqDGF+Pgkdu36mGrVTFSF9QZcu/aQd9+dhUql4sCBSZQtW8DUJkkkEgOEhUXTosVMnjyJYMyYd/nii/amNinLWbnyPz77bA2lS+fn0KHPzab44eLFINq0+QUnJ3uuXPkh8+kBoY+hdyGR+7I+VD9detFEWDdLv920f6FWyyyz2xCZuX5LH/ybcHSjEC6u+YTn5fw+kbTbfkTG+9y/KYSLlRW0HKCYadu2nSc+Pony5QtStWpRxc6jJHPn7gWgfftqUrhIJGaMu7sT06aJES4+PgcICHhqYouynk6dauDoaIe/fzAnTwaa2pxUqlQpQr58LkRHxxtnl0dB0d5DqxXXJh3PJ+2aWehIipc3Ye8K8fzecNj1l1h+p8PLe7boyqhrtYY8hRUzTefa7N69ttncIWSGu3efsXmz6KswZozh5G6JRGI+tG5dhRYtKpKYmMzXX280tTlZjrOzAx07iplC5tRx18rKiubNKwKwd28Who6eT9o1s4ojKV7ehJunxXOVxvqeLS8rj05OBr+UboUKJureufOMkycDsbJSpTYysjTmz9+HRqOlefMKFus5kkjeJlQqFd9+2xlbW2v27r3Gnj1Gtq03Y3r1ElVHO3deIiHBfErD331XiJd9+64ZdwBDzeo8CqZvVnf9hH4GkhkgxYuxhD6GkIciTpgYL4Yy5i0KNd/NeJ9zfvDsgShPq9dBMdN8fYWoaty4HAULuip2HqUICYli7VrhORo7VnpdJBJLoXTp/AwdKprXfffdFpKTNSa2KGupU6ck+fO7EBkZx9Gjt0xtTipNmpTH2tqKW7eecO9eSOYPoPO83DwNcWm69ab1vqiD9TOQzAApXozldkqr6KLl4XFKT5pytTOeYwT6IYzN+6YvQ8tiduwQ3RC7daul2DmUZN26U8THJ1GtWlHeecdMe/tIJBKDjB/fEnd3R/z9g9m0yci5O2aKlZUVbduKwba671lzwNU1F3XqiGnQBw4YUTJdsKS4+U5KTJ/bYsZ5L1K8GMvtlH/KMl761sme1TPePjIMjm0SywqOA7h/P4zr1x9hZaVKLaGzJLRabWpfl/79G1hkvo5E8jbj4uKQOtDwl19257jJ023aiKZuu3ZdNivPkq6VxJkzdzK/s0oF
VQ2Ejp5v5SHFSw5A53kp6wUBKXMlPKtlvP2Bf0R4qWSVl3fefUN0tf5eXiXw8HBS7DxKceJEAP7+wTg62tGpU01TmyORSIxg8ODGeHg4ERj4jA0bjGxdb6Y0aFAGV9dchIREcfq0+VQd1a5dEoDTp+8YdwBDSbtm3KxOihdj0XleSlWDu1f0yxlxMmV0vPcAoXIVws9PiBdvb8vzugCsWHEcEDM7nJ2VC61JJBLlcHKyZ9SoFgDMmbMXjcZ8PBRviq2tNS1bVgbMK3Tk5VUCgMDAZ4SERGX+ADrxcvUYJKY0GrS1S9+s7vZZs5kwLcWLMUSGweMUxZ3LWUzddHCCQi/JzwhKGehVTrnqn9jYBI4cEUlk775reeIlKiou9ctA145bIpFYJgMGNMDZ2R5//2AOHbppanOylLZtReho714jq3sUwM3NkTJl8gNGho6KV4TceUT399tpcpXS5r0kxEHgpTczNIuQ4sUYdCGjQp4QfE8sl6qacbJuYoJe7BQtp5hZx4/7ExeXSKFCblSqpFwPGaXYu/cacXGJeHrmo3p1y+sILJFI9Dg7O6SWFi9efPgVW1sW9euXQaVSERDwlCdPwk1tTiq1apUE4MwZI6qCVKo0JdNp/l5mmrQrxYsxpEvW1eW7vCRZ93EgaJLB3lHRxnSHDgnvTosWFSwy0VXndWnbtqpF2i+RSNLzwQfiYrh37zUCA3NO1103N0cqVxbf5ceP+5vYGj26vBejPC9guN+LmSbtSvFiDKnipSYEpsQ8X5bv8iClH0CRsormu+jUtiWWF8fGJqR2h3zvvZcIQYlEYjF4euajRYuKaLVaVq82j4teVtGgQRkAjh+/bWJL9Og8L+fO3TOuyqtaSt7LlSOgy1PKUwgKlNBvI8WLBXMrjefF/zUqje6nxHsVDBklJCRx+fJ9ALy8Sip2HqU4ePAGMTEJFCniLkNGEkkOQhc62rTpbI5K3K1fX4iXY8fMx/NSrlwBnJ3tiY1N4Pbt4MwfoHQNkccZpYY7l/Xr04aOgq5DtOlDZVK8ZJaYSHiQIkYKlIRnQjBQqmrG++i2L6KceLl69SHx8Um4uztSqlRexc6jFLoqKRkykkhyFt7elXBysuf+/TDjy3jNkHfe8USlUuHvH2w2eS9WVlaUK1cQgNu3n2T+ANY2UKmBWE4XOnp+ztEpIy3MOqR4ySwBF8R8h7xFQJ3y4ShQEpxe0oY/GzwvuhhnzZolLPLir3O9Nmmi3O9IIpFkP7ly2dGunbi527gx53TcdXNzTC2MOHXKfPq96CqObt0ywvMCrzek0QxCR1K8ZBaDIaNX5GikzXlRiHPnRNWTrtbfknj4UE1g4DOsrFTUrWt5+ToSieTldOkiRpXs2HExR4WOqlUTQ2OvXn1oYkv0lC4txItRnhfQi5dLh/SDGM2wWZ0UL5nFP6VMuoyXPln3ZfkusdH60JKCnpdz50SyriWKl//+EzHjqlWLkjt3LhNbI5FIspr69cvg6GjH06eRXL36yNTmZBk6z8uVK+YjXsqUKQBgXM4LQPk6InwU+lgMEoYXm9WZwYRpKV4yi67SKO1YgJdVGj1MyUR38RANgBQgLi6RO3fEJNHKlYsocg4lOXZM/I502fsSiSRnYW9vQ8OGwvO8f7/5NHZ7U3Ti5do1cxIvOs9LsHFeLjsHfRpE2sTciuY1YVqKl8yQEAd3UkYBeFbXZ2OXfknY6IHy+S737oWg1WpxdrYnb15nxc6jFLr5ILrBYhKJJOfRvHkFwMipx2ZKxYpCvNy/H0Z4eKyJrRGULJkXGxsrYmMTePTIyERih5S5eHHR+nXl66Tf5sZJ446dRUjxkhkCL4lmc675RAvlhDjReK7gS/I0siHfJSBANH8qVSqfxSXrxsUl4u8v7K9ataiJrZFIJErRrJkQL6dOBRIXZx7zcd4UNzdHihRxB+D6dfPwvtjaWlOypKg4NTp0ZEi8OLml3+buVeOOnUVI8ZIZbhlqTlcVrK0z3ue+8mXSgYHPhCml8il2DqW4desJycka3N0dKVAgt6nNkUgkClGiRB7y5nUmKUnDlSsPTG1OllGxYiEAs8rl0YmXoKAQ4w5gSLzY2qXfJtm0AlSKl8yQbizAa3TWhWwJG+nabnt6Wl5/F12suGLFwhbnNZJIJK+PSqWiRo3igL46MiegEwr374ea2BI9+fK5APD0qRHTpcGweLF5XrwkGXfsLEKKl8xgKFn3ZZVGkC09XnTixRI9L9euibsV3d2LRCLJudSsKaohL1zIOeJFFzZ6+FBtWkPSoBcvkcYd4HXES5L0vFgGSYn6UFG6MumXJOtGhEJEituusHKVNPfuCcVfooQy1UxKcuuW6EVQvnxBE1sikUiUpkYNMfrj4sX7JrYk6yhc2A0wL/GSN68QL8+eGSlecqUUfqQLG9mn30Z6XiyEKDUkJohlZzcITrlzeOlYgJRk3TyF9R8GBQgNFR8wndq2JB49UgNQtKiHaQ2RSCSKo+tBcvfuM5KTc0azOnMUL4p4Xl7IeZHixTKwsdUv68ql8xYVQiYjsiHfJSEhiejoeEBkvlsaulI+3ReARCLJuRQu7IatrTUJCcnGl/GaGbrvrsePw81GkGWZeIlNkzMjc14sFOs04iUpxQPzvBJ9nmyoNAoLE8rYykqFq6tldaeNiYlHrY4BoFAhN9MaI5FIFMfa2orixUV4+86dZya2JmvInz83NjZWJCdrePIkwtTmAHrxYnTY6HU8LxopXiyDtJ4Xq5TS6IS4l++jCxsp6HnRXfxdXR2xsrKsP6fuzsvZ2R4XF4dXbC2RSHICJUsK8XL3bs4QL9bWVri6Cq93RIR5NKrTiZeIiDji440QGfay2ijnYJR40XlelGtQFxYmxIu7u+WFjIKDxV1K/vyyv4tE8raQL5/4f9fl6uUEnJzEhV0Xwjc1Tk765FqjGgIa9Lw8l7Br4mojm+w4ycyZM3FzcwNArVYzceJERfZRFJVKiBZNshhaBa8WL7qhVvmKKWaWLmzk7u6k2DmUQveP7uxs/4otJRJJTkGXm6e78coJODqK77CYmAQTWyKwsdH7JRITjfCQyD4vQoQADBs2jGHDhuHl5cXw4cOzfJ9sQed90XleEuNePlnTMaX6J145V6IlC4DYWKHcdf/4Eokk56PzEutuvHIC5uZ5UalU2NqK61RCQnLmD2BQvNim3yani5dp06YxbNiw1Nfe3t4sXLgwy/fJFnR/PN04AI3m5X9A3SyIaLViJmk0QjxZWVled9rYWHGXkiuX7Su2lEgkOQVdYYEuXy8nYG6eFyBVvLyR5yU+jXixstJHHSBni5eAgADUanVq+Cctfn5+WbZPtmH9nOcFXh460o0Vj1IrZpLO8WOJrfV1/+i5cr2iaksikeQY7OzEBTApyQiPgJmiyzExF88L6H/PWeZ5gfShIxOLF0VzXgICAgyud3NzQ61WZ8k+8fHxxMfrPzAREQqWquk8L6o0mi8hTh8eeh5dDxhFxYtQLxaoXVKz4O3tsyX1SiKRmBEvi7hbGjrPt84Tbg7oPC9GicRcGYg
XWzuIT/GY5WTPS0Z4eHgQGpq5IVYZ7TNt2jRcXV1TH8WKKZccm+p5SU7SZ16/1PPiJp4VDBtZsufF2lp8/JKSzKOxk0QiUR5L/K56FYmJQiDoBIM5oA8bZaHnJW3F0ds4VTqzwuVl+0yePJnw8PDUR1BQ0JualzE6z0tyItil9CVJfIl40XleopXrJKm14NsXncclIcG0Cl4ikWQfOu1iyd9dz6MTCHZ25iNedBj1e37bw0aenp4G16vV6gzfy+w+9vb22NtnU7WKTrwkpYiX6PCXVxLpPC8y58UgUrxIJG8fuipDB4eck6ivS4q1tTWfELiuIMKoas604kWr1StOW/MRL4p6Xjw9PXFzczOYx+Lt7Z1l+2Qb1s+JF3h52CjV86JWzCR7e6H04+NN68IzBl1CmVEdICUSiUWStit4TkGXFGtOYSNd8rCjoxEFETrxotFAYpokZDPyvCgeNpo8eXK6KiFfX990ZdABAQGpfV1edx+TkTZsZJsJ8aKg5yV3blF2GBHxioZ5Zoi9vfh9GtUBUiKRWCQ68WKJg2QzwtxyXhISklIFVdpuu6+NQ5qmpxk1qsvp4mXixImo1Wp8fX3x9fXl1KlT+Pj4pL7v5+eX7vXr7GMyng8bgclLpXV3L+HhltczwcND/IOEhES9YkuJRJJTCA8XoXY3N8saJPsydDONzKVZaNp+M0Z5Xqxt9CGijEYE5OScFx1pW/t379493Xu6LrqZ2cdkGAobvSxhNxuqjXQNn3RfCJZE/vyixDw4OBKtVmuReTsSiSRzPH4sChjy5MmgxYQF8vSpmN6sm9tkanQhIzs769TwfKZxcILEhIwnS7+N1UYWi6FqI7MJG8VaXPZ+gQLiHz02NoGoKPNp7iSRSJTj3r0QQD9d2tKJiUkgMlJcB3Q3ZKZG53kxKmQEEBejv265eOjXv01hoxyF0Qm7ypVK61yvSUkas2pN/To4Otqnull1E6YlEknOJTlZQ1CQaHtRokReE1uTNTx7JrwuDg62uLg4mNgagS6MZfTcuKDrosoodx5wy69fL8WLhZLO85ISr32dJnWJ8a+eQG0kuXLZpU4QtcRZIfnzC+/LkydSvEgkOZ1Hj9QkJiZjZ2dNoUKupjYnS9CHjFzMJvT96JEawPjf8Z3L4rlE5fTt2+PS5CdK8WJBZDZhN5ezGGYFioWOVCpVapxVF0u2JEqUEK7jgIBgE1sikUiU5saNx4Dwuug6bFs6wcF68WIuPHigBqBwYXfjDnD3inguWUW/TqOBgAv611K8WBCZTdi1ssqWiiOdALh7N0SxcyhFuXIFAbh584mJLZFIJEpz/vw9AKpXV3CMSzZz584zAIoUMVIoKMCDB2EAFCniZtwBdOKlRGX9useBECOEGpNXw6dLjDcwC5DiJTNkNmEXwDFFvChYcVS8uBAvuliyJaEXL49NbIlEIlGaCxfE+JacJF5u3RI3XuXKFTCxJXoePlQDbyCo7ug8L2nEy+1z4rlsLWjeG+q2M97ALECKl8yQNmz0Ok3qIFsqjooXF9nglul5Ef/w0vMikeRstFptGvFS3MTWZB23bokbL92NmDmg97wYIV5io+DJHbGc1vPif148l6n5RrZlFVK8ZIbUsFHC63tesqHiSBc2CgqyRPEi/uEfPw63yIRjiUTyegQGPuPp00hsba2pVKmwqc3JErRabeqNV9my5uN5eSPxcu+aeHbLD65pKsL8UzwvpWu8mXFZhBQvmSFPyj/cg1uvL16yYTijLmxkiZ4XFxeHVPGli4dLJJKcx8GDNwCoU6eUcV1fzZAnTyKIjIzD2tqKUqXymdocAGJi4nn2TFQFFS7slvkDGMp3Ab3npbT0vFgeFd4Rz9f+A+cURfvs/sv3yYbhjLqw0cOHaouc0Fy3bikATp58cRinRCLJGRw4cB2Apk3Lm9iSrENXPVWyZF7s7c1jovS1a48A0TBPN4IlUxjKdwkLhpCHomzas1oWWPnmSPGSGXTiJeg6FEv5B7x2XDTzyYhs8Lzkz5+b3Lkd0Gi0qcljlkTdup4AnDwZaGJLJBKJEiQkJHHs2G0AmjWrYGJrso4zZ+4AUKVKEdMakobLlx8AULmykTYZ8rzovC5FyooWIGaAFC+ZwS0fFC4tlnXl0uHPRBgpI7LB86JSqahatSgAFy++whNkhujEy9mzdy3ScySRSF7O0aO3iI6OJ29eZypXzhn5LqD3Fr/zjqeJLdFz5YoQL1WqFDXuAAbFi3nlu4AUL5mnfIr35fZZKFdHLF85mvH22VBtBPoP6qVLlideypTJj7u7E3FxiRZpv0QieTmbN4uL33vvVcfKKmdcdpKSkjlz5i5gXuJF73kxQiTGREJwSu6hIc+LmeS7gBQvmadiPfF8/QRUbiiWXyZeGnSGGXth4HeKmlWtmuibcOlSkKLnUQKVSkX9+sKjpYuLSySSnEFcXCI7d14CoHNnLxNbk3VcvfqQ6Oh4cud2oHx58yiTTkpK5vp1kfNiVNjo7lXx7FEIcqcZyHhbel4sH514ufYfVKovlq8ey3j7AiWgZgsRK1SQatWE5+XKlYckJSUrei4laNlSqPx//71iYkskEklWsm/fNSIj4yhc2I06dUqa2pwsQxcyqlOnlNl4k/z9g4mLS8TR0Y5SpYwYfGkoZBQbDQ9uimUz6fECUrxkHs/qYGsPkaH6aZv3rkGEabvbliqVF2dne+LiEi0yadfbuxIqlYpLl+6ndoeUSCSWzz//nACgU6eaZnORzwqOHhUJyLqcPXPg/Hnhea9UqbBxv2vdQMa0lUaBF0VRikdBcDefXjY555OUXdjaQdkU1+eD21AsJXP+Zd6XbMDKyio178USk3bz5HGmdu2SAOzZI70vEklO4M6dZ+zdK5qevf9+fRNbk3XExiZw6JDwRphT9dSRI8Km+vXLGHeAl1UamVG+C0jxYhwVdHkv/0GlBmLZxOIFoGZN0XL7xAnL7JfSqpX4h9m9+7KJLZFIJFnB0qVH0Wq1NG9eAU9P82jilhUcPnyT2NgEihRxN5syaa1Wy5EjovK1ceNyxh3EkHgxw3wXkOLFONI2q3udpN1sQveBPXToBtqX9Z4xU1q3rgqIu4dnzyJNbI1EInkTYmISUkNGgwY1MrE1WYvuBqtVq8qoVCoTWyO4desJT55E4OBgm+rFzhRRangmKpUoUUm/PuC8eDajfBeQ4sU4dEm7ARf0IaQbJyExweDmSUnJ/PnnQQYPXkxMTLxiZtWtWwo7O2sePlQTGPhMsfMoRZky+alZszhJSRo2bDhjanMkEskbsGrVccLDYyle3IMWLSqa2pwsIzlZkxrabt26iomt0aMLY9WpUwoHB9vMH0BXaZS3iL7FR3ISBIpKMel5yQnkLy6Sl5KTxATO3HnEjCNdbPA5rK2t8PE5yK5dlxTtIuvoaE/t2qLV/uHDNxU7j5L07FkXgDVrTlqk90gikYjy6Hnz9gEwevS7WFvnnEvN2bN3efYsity5HYzPLVEARUJG966La1suZyhU+g0tzFpyzicqO1Gp9M3qrp/U571kEDpSqV
Q0aiRKpXUfMKXQfXAtVbx06lQTe3sbrl17JBvWSSQWyqpV//HkSQSFC7vRq1ddU5uTpei8wt7elbG1tTaxNYKkpGSOHxfVT02aZKF40d2Qe1YHM6sUMy9rLImKBpJ2X5L3ohMvR49mj3g5evQWyckaRc+lBG5ujqm5L//8c9LE1kgkkswivC57ARg71hs7O/MYWJgVxMYmsGnTWQB69KhjYmv0HD/uT2RkHO7uTsY1p9No4L+tYrlsLf163VgAM8t3ASlejCdtszpd0u7VoxkOaWzYUIiXixfvo1bHKGZWtWpFyZ3bgfDwWIv1XPTuLe7UfH1PERERa2JrJBJJZli06CCPHoVTqJAbvXu/Y2pzspRduy4THh5LkSLuNG6sbOPRzKAbv9CuXVXjQnTn9sKjAHDMLbrC60j1vNR4UxOzHClejKVcbeFGexokEpxsbCH0MTy+Y3DzggVdKVMmP1qtluPH/RUzy8bGmkaNhPfl338ts+S4SZNylC1bgKioeFat+s/U5kgkktfkyZNwZs/2A+CLL97D3j7neF1A33CvV6+6ZtNwLyEhiZ07LwLQqZOR4xe2+4hn7/6Qy0ksJyaIGX4gPS85ilzOUDIl09z/ApRJ+dC8JHSk874oHTpq164aAFu3XrDIpFcrKyuGD28GwJ9/HiIx0fLGHUgkbyMzZuwkOjqemjWL06VLzpljBBAUFMrhwzdRqVRmlcdz+PBNwsJiyJfPJXVGXKYIfQzHN4vl94anObCvKJ/2KASlqmaJrVmJFC9vQtpmdamho4yb1enyUXRdEJWiVavK2Nvb4O8fnDqky9Lo2rUWefM68/Chmq1bz5vaHIlE8gouXLjHmjUiT+3bb7uYjWciq9B5gRs1KkuxYh6v2Dr72LJFhIzat69uXMho92JROVupfnqRsnmOeG4/QkQWzIyc9enKbnTN6tJOmL6aseelfv3SqFQqbt58QnBwhGJmOTs7pLasttQLv4ODLYMHNwbAx+eARXqQJJK3hcTEZD75ZA1arZYuXbyMa5JmxkRHx7N0qfhuHzCggYmt0RMXl8iuXSI9oGNHI0I7Gg3sWCSW26Xxutw4JfI5bWzTe2PMCCle3gRd0u6NU1A+xY0YeAmiww1u7u7ulNpKWulS5g4dagCWGzoCGDCgIbly2XHp0n05MkAiMWPmz9/H1asPcXd34ttvO5vanCxn9er/UKtj8PTMR5s25hNC2bv3KpGRcRQq5GrcxO4z/8KTO6IpXdOe+vU6r0vTXmY1jDEtiouXmTNnsnDhQhYuXMjMmTNfub2fnx89evRg4cKF+Pn5MWnSJHx9fZU20ziKVRDZ2fExEPEMCnmKaqNrJzLcpXlz4RHZteuSoqblhNCRh4cTH37YBIAZM3ZYZOm3RJLTuXnzMb/+uhuA77/vQt68Lia2KGtJTEzGx+cgACNGNDOrhns6b1C3brWNC9OlJuoOAPtcYjnsCRxcI5Y7j8sCK5VB0b+CTqwMGzaMYcOG4eXlxfDhL3dBqdVq/Pz8GD58OMOHD6d06dJ0795dSTONx8pK73G59nqhI10y7b591xUdFZATQkcAo0a1wM3NkRs3HrNxoxwZIJGYE4mJyXz00WoSEpLx9q6U45J0QeSUPHgQRr58LnTvbj69XW7efMyRI7ewslIxYEDDzB8g5KG+t0va0ND2haLSqMI7UN58ft7nUVS8TJs2jWHDhqW+9vb2ZuHCha/cLzAwEK1Wi7+/f7r9zZJMNqurWrUoxYp5EBubwP791xU1TRc6Wr/+DBqNZXotXF1zMWpUCwBmzdpFQkKSiS2SSCQ6Zs7cwblz93B1zcX06T3MZkhhVqHRaJg/X4w5+PDDJsbNDFKIJUuOAGK+UtGi7pk/wM6/QJMMVRrpBzEmJcK2BWLZjL0uoKB4CQgIQK1W4+bm9sJ7fn5+Sp02+zE0Yfr6CZG9bQCVSpXqfdm+/aKiprVtWxVX11wEBYWmDu2yRAYPbkyBArkJCgpl2bKMq7kkEkn2ceDA9dT5RT//3JvChd1Ma5ACbN58nmvXHuHsbE///uaTqBsREcu6dacA+OCDxpk/QHIy7DSQqHt4PYQ+ErP7GptpxCMFRcWLIdzc3FCr1S/dd+3atfj6+rJw4UImTZr00m3j4+OJiIhI98hWdOIl6DrkKSJyYGKj9JM4DdC+fXUA/PyuEBeXqJhpuXLZ0b17bQCWL7fci76jox0ff9wagJ9+2snTp5EmtkgiebsJDo5g3LiVAAwc2DD1hiwnER+fxIwZ2wF9+NpcWLfuFDExCZQtW4CGDY0YDnl6l2iw6uIBTdKIFF2i7nsjwNYua4xViGzPPPLw8CA0NDTD9728vPD29qZ79+4MGzaM0qVL06NHjwy3nzZtGq6urqmPYsWKKWF2xrjlg8IpjYEuHhS18vDS0FHNmsUpVMiVqKh4Dh26oah5uruFf/+9wuPHhqugLIG+fetRtWpRIiLi+OGHLaY2RyJ5a0lISGLEiGU8exZFxYqFmDKlo6lNUoTly49x714oBQrkZtiwpqY2JxWNRsPff4vrywcfNDIuVKdL1G05EOwcxPLNM6JPmRmXR6fltcWLr68vPXr0eOXj7NmzLz3Oy4QLgKenJ56enqmve/bsia+vb4bemsmTJxMeHp76CAoKet0fKeto2FU871gIlVJCR6d3Z7i5lZUVbduKO5UdO5QNHZUrV5C6dUuRnKxJbW1tiVhbWzF9endUKhXr1p1OnaAqkUiylylTNvLff/44O9vzxx8DyZXLvO/QjSEyMo7ffvsXgE8+aY2jo72JLdKzfftF/P2DcXFxSPWsZ4rgIDgpPEq0S5NTqvO6NOkpwkZmzmsPnujevXumqn7SCpC0qNXqDN8DIZLSnkeXMxMQEICX14uZ7Pb29tjbm/iD9d5w8P1JuOJ0SU4nt8OjQChUyvAu71Vj8eLD7N59mYSEJEUnr/bv34CTJwNZufI/xo71NqtSv8xQs2YJ+vevz7Jlx5g82Zd///00R02slUjMnaVLj7Js2TFUKhXz5/enbFnz7AHypsyfv4/Q0GhKl85vVsMlNRoNv/wiboyHDm2Ks7ND5g+y6y/RnK5aUyguKlIJC4YDq8Vyp7FZZK2yKHYV8/T0xM3NzWDui7e3t8F91Go1PXr0SLePzuPyMsFjcgqXhtptxPL5fVC7tej3snVehrvUretJvnwuhIfHsnfvNUXNe++96ri7O/LgQRgHDihb4aQ0n3/+HnnyOHPz5hMWLNhvanMkkreGo0dv8dVXGwCYPPk9vL0rm9giZQgMfIqPzwFADJe0sbE2rUFp2Lr1AjduPMbVNRdDhxoRykpOgl1/iuW0oaGdi0R5dPm6UNF8xNrLUPQWfPLkyekqi3x9fdOVPgcEBKRrXOfm5sbEiRPTCZWFCxfSvXt3g1VLZkX7keJ592JoO1Qs7/oLYqMNbm5tbUWPHqKGfvVqZScnOzjYpp7rzz8PKXoupXFzc+Trr0WM/eefd3Hp0n0TWySR5HyuXHnA4MGLSUrS0LmzF6NHtzC1SYqg1WqZPNmXuLhEGjcuZ
1bddJOT9V6XYcOa4eqaK/MHObkDnj0A17z6dIe05dEW4nUBhcXLxIkTUavV+Pr64uvry6lTp/Dx8Ul938/PL91rEIJn5syZqY+QkBDWrVunpJlZQ912kL84RIZCTITwxkSpYd/KDHfp00co3H37rvHwoVpR8wYPboy1tRUHD97g4kUT5AVlId261aZdu2okJWkYM2YFsbEJpjZJIsmx3LsXwvvv+xAZGUe9eqX55ZfeOa6fi45Nm85x6NBN7O1tUnPszIUtW85z69YT3NwcUzuPZxpdom6rD8AuJd3i6EYhaNwLQJOMi2PMDZXWUgffZEBERASurq6Eh4eTO3fu7D356mmw5AvhemveB/74CEpUhoWXIIN/gm7d5nL8uD+ffdaWjz5qpah5Y8euYP36M7RvX52FCwcpei6lCQmJ4t13ZxIcHMmHHzbhu++6mNokiSTH8exZJJ06zSYw8BmVKhVm/foxxt3xWwBqdQxNmkzj2bMoJk5sy4QJyn4fZ4bkZA3Nm8/g9u1gJk1qx/jxLTN/kCd3YUApkdKw5BYUSSmx/rgxXD4C/abAgG+z1vBMkpnrt2VmbporbYaIMrMbJ8GzOjg4wd0rcOFAhrv07Ss69K5e/Z/iXXDHjHkXENnqt249UfRcSpMnjzO//NIHEKGwgweVLTmXSN42wsKi6dvXh8DAZxQr5sGKFcNyrHABmDp1G8+eRVG2bAFGjjSvsNjy5ce4fTsYd3dHBg82oikdwM4/hXCp+a5euNw+J4SLtY3o7WJBSPGSlbjnh8Ypbrd9K8WwK9CXoBmgXbtquLrm4v79MMW74JYvX4jWraug1WpTW15bMi1aVGTgQFGaPm7cSovuYyORmBNhYdH06rWAy5cfkDevM6tWDadgQVdTm6UYhw7dYMWK4wDMmNEDe3vzqWIMCYli5sydAHz2WVtcXIyoMHpyFzb+JpbTJuqmlkf3gDyF3szQbEaKl6ym4yjxvH8VvNtPLB/fLD48BsiVy46uXWsByifuAowdKyq91q8/zf37YYqfT2m++qoDFSsW4unTSIYO/VvOPpJI3pDnhYuv72hKl85varMUIzQ0mgkTRJnwwIENqVevtIktSs/MmTtQq2OoVKkw/frVz/wBtFr4dajo/F65ITTqJtarn8K+VWLZzOcYGUKKl6ymUgPwrAbxsSJ8VNNb1NRvnZ/hLrrQ0a5dlwkJiVLUPC+vEjRqVJakJA0+PpZfauzoaM+ff35A7twOnDlzh6+/3mRqkyQSiyUkJCpVuOTJ48y6daMoV878G5YZi1ar5bPP1vD4cThlyuTnq6/Mq1vwxYtBrFghbmp/+KGrcWXbu/6Cs3tEJ91PFoNVymV/2wJIjIdytfVjbiwIKV6yGpUK2qd4X7bOh05jxPLOP4WgMUDlykWoUaM4iYnJrF6tfBdcXe7LypX/KV7llB2UKpWPuXOFl2vp0qOsXXvSxBZJJJbH/fthdO48J1W4+PqOonx5ywolZJbVq0+wc+clbG2tmTu3H46O5tMtWKvV8r//bUCr1dK5s5dxHqHgIPD5RCwP+gGKlhPLd67A6qliudvHGRaUmDNSvCjBu++Dows8uCXUboGSooRa56IzgC53Y/Hiw8THKxv6aNy4HO+840lcXCKzZu1U9FzZhbd3ZT75RAxv/PxzX86dMxymk0gkL3LjxiM6dfodf/9gChd2Y8OGMTleuAQEPGXKlI0ATJzYlmrVsnku3itYv/40p0/fwdHRjq++6pD5A2i18Nsw0bqjYj3oMkGsT0yAmf2F1+Wd96BZ7yy1O7uQ4kUJcjnrk3W3+0DH0WJ582zxgTJAly5eFCzoyuPH4WzceEZR81QqVap7dO3aU1y79lDR82UXH33UCm/vSsTFJdK//yICA5+a2iSJxOw5ffoOXbrM5dGjcMqWLcDmzeNybNt/HTExCYwYsZSYmAQaNizDyJHNTW1SOoKDI1JD4OPHt6RQIbfMH2TPUjGyxtYePlkC1ikhpxXfiiqj3Hngoz8t0usCUrwoh67j7vHNYlyAvSMEXIRLhw1ubmdnk9ruef78fYqXTXt5laBDh+potVp++GGroufKLqysrJg/vz9VqxYlNDSa999fyLNnkaY2SyIxWzZvPkfPnvNRq2OoVasEGzeOpUgRd1ObpSharZaJE9emhsd+//19rKzM51Io8nDWEhYWQ5UqRRgxwghh9ewBLJgglgd+p59hdOUYrJkulsf7WMQAxowwn79YTqNkZTH4SqOBw776yqOXlE3361cfFxcHbt8OZs+eq4qbOHlye2xtrdm//zqHDuWMPinOzg4sXz6U4sU9uHPnGf37LyI6Ot7UZkkkZoVWq+Wnn3YxcuQy4uIS8fauxJo1I/HwcDK1aYrz11+H2LDhDNbWVvj4DKRwYTdTm5SOdetOsWfPFWxtrfn9977Y2mYySVerhd+HQ3S4aJja7WOxPjYKZg0Q1yTvAdC4W9Ybn41I8aIkHVISd3cshPYpDYCObhRJVAZwcXFgwIAGANnSh6Vkybyp5/v++62Ke3uyi/z5c7Ny5XDc3Z24cCGI4cOXkpiYbGqzJBKzIDY2gREjlqXOyRk+vBlLlgzB0dHexJYpz7Fjt/n22y0AfP11Rxo0KGNii9Lz4EFYah7Op5+2oWLFwpk/yN4VcGI72NrBp0tEAzqAhZ/CQ38xxmb07Cy02jRI8aIkDToLt1zoY3h4G6o3A02yfgiWAYYMaYKdnTWnTgVy8uSLE7mzmgkTWuHi4sCVKw/YsEHZXJvspHTp/Cxb9iEODrbs23eNESOkgJFI7t0LoXPnOWzdeh4bGyt++qkXX3/dCWvrnH8puH8/jOHDl5KcrKFbt1oMGWLkfCCF0Gq1fPrpGiIi4vDyKmFcHk7II5if0rOl3zdQopJYPrFdP9fo07/ByfIbDub8T6wpsbXTT5jeOh86pXyodizMsGy6YEFXunWrDcCCBcr3YcmTxzm1cd20aduJjIxT/JzZRa1aJVm8eDD29jbs3HlJChjJW82//16mdeufuXTpPu7uTqxZMzK1x1ROJyIilkGD/iQkJIoqVYowY0ZPsxq6CKLNw8GDN3BwsOW33/pmvqeLVguzR4iBwGVrQc/PxPrwZ/DLELHc9SOoYV7JycYixYvStBsGVtZivlGRMsJlFxECB/7JcBddgtbu3ZezZQbRkCGNKVUqL48ehTNjxg7Fz5edNGtWQQoYyVtNUlIyU6duY9CgvwgPj6VmzeLs3v0J9eubV8hEKRISkvjwwyVcvfqQfPlcWLx4sFn1cwG4cOEe33yzCYDJk9+jTBkjOhrvXw3Ht4j5erpwkVYLv4+AsCfCCzN4atYabkKkeFGafEWhfkrXxh0L9Xkwm+dkWDZdtmwB2rSpCsBPP+1S3MRcueyYNq07AEuWHMlxPVKaN6/I4sWDsbOzThUwcoyA5G3gwYMwevf+g7lz9wIweHBjNm4cS9GiObuiSIdGo+Hjj//hyJFbODnZs3z5UIoW9TC1WekIC4tOGW2STOvWVfjwQyPCWaGPYd5Ysdz3Kyglrh/sXQFH1gtB
M2mF6DuWQ5DiJTvQCZY9S0VDIDsHUWd/9ViGu3z6aRtUKhVbt57n4kXDCb5ZSZMm5enWrVZqmV5O804IATMkVcAMHPgnUVE5J0QmkTzP5s3n8PaexbFjt3FysmfBggH88ENX7OzMZ+ig0kybtp0NG85gY2PFokWDzK4RnUajYfz4Vdy/H0aJEnn47be+mQ9nabUwZ5RohFq6BvT+XKwPvgdzUzq89/8GytTMStNNjhQv2UGNFlCkLMREwqld0OJ9sX5TxhnflSoVpmtXL0CMas8OvvmmM+7ujly9+pBFiw5myzmzkxYtKvL33x+SK5cdBw/eoGvXuQQHR5jaLIkkS4mIiGXcuJWMHLmM8PBYatQQYaJOnXLWxetVLF58mHnzRNXmrFm9aNasgoktepF58/bh53cVe3sbFi4chKtrrswf5OBaUcVqbSOScW1sRTn0rEGiu26l+tBzYlabbnKkeMkOrKz0Teu2zYdOKe69w+tFM6EM+OyzttjaWnPo0E0OH76puJl58jindt796add3LsXovg5s5tmzSqwfv1o8uRx5vLlB3ToIFqiSyQ5gSNHbtGy5U/4+p7GykrFhAmt2Lx5HJ6e+UxtWrayevUJ/ve/DYBo/d+rV10TW/QiR4/eSs0x/PHHblStWjTzBwkLhnkp3pU+X0Lp6mJ54+9wYT84OMFny/Tl0jkIKV6yi1aDwD6X6LIbFw1VGqeUTf+R4S7Fi+ehf3/Rh2Xq1G1oM8iRyUp69apLgwZliItLZPJk32w5Z3ZTo0Zxtm4dT8mSeQkKCqVjx9mcPn3H1GZJJEYTHh7Lp5+uoWfP+QQFhVK8uAcbN45l4sS2mW9yZuH4+p7i00/XAKIYYfz4lia26EXu3n3G8OFL0Wi09OxZlz59jJzqPG+MqCbyrAZ9vhDr7lyBxZPF8vBfRKFIDkSKl+zCxR2a9RHLW+dD5xTvy3YfSMg492LChJY4Otpx4UIQ27dfUNxMlUrF9Ok9sLMTnXdXrVJ+yrUpKFkyL1u2jKN69WKEhUXTvftcVq36z9RmSSSZZteuSzRrNj318ztwYEP27PmMOnVKmdiy7GfjxrNMmLAarVbLwIEN+e67LmZXEh0WFk2/fosIDY2matWiTJ3azTgbD/nCoXWimvWTJaI1R2ICzOinH7rYbmjW/wBmghQv2YkucffwOqjcEPIWhfCnImaZAXnzujB8eDMApk/fQVKS8om0ZcrkZ+LEdgBMmbIxx4ZV8uZ1Yf360bRtW5WEhGQ+/XQNkyatVXyqt0SSFTx8qGb48L8ZPHgxT55E4OmZjw0bxjBtWndcXHJOVcnrsnXrecaNW4lGo+X99+vx449dzU646Mq2ddO7ly790Liy7Vtn9b1bek+GsiI/khXfgv95ix+6+DpI8ZKdlKslZk0kJsC/f0OHlDyYTRlPmwbR98XDw4mAgKesWXMyW0wdMaIZjRqVJTY2gdGjl+fY0mJHR3sWLRrE55+3Q6VSsXz5cbp3n8vjx+GmNk0iMUhCQhJz5vjRuPE0tm69gLW1FWPGvMuePZ9Sr15pU5tnErZtO8/o0ctJTtbQs2ddZszoYVbDFkE3cHENx4/74+wsyrYLFjSi0+3dqzC5lUjGrdIY+v5PrE87dHHCQoseuvg6mNdf922gU0py1ZoZUPc9Ma781hnRvjkDXFwcmDBBxG1nzdpJRITh7rxZiZWVFb//3hd3d0cuXrzPrFk7FT+nqbCysmLcuJYsXz4UV9dcnDlzl9atf+bIkVumNk0iSceBA9dp0WIm06ZtJzY2gdq1S7Jz58d88UV7cuUyr8Zr2cXKlf8xYsQykpJE2/+ff+5ldsIF4Ndf/2XdutNYW1uxcOEg4+YWPQqAz1uKRqflasP328DOXgxhTDt0sVHXrP8BzAzz+wvndJr3Fd6XmAhY+Z2+8mj2SIjOuGy3f/+GeHrmIzg4Mlsa1wEUKuTGTz/1AmD+/P0cPZqzL+YtWlRk586PqVixEE+fRtKr1wK++26LDCNJTM6tW0/44IO/6NvXh4CAp+TL58Lvv/dl8+ZxVKlSxNTmmYz58/fx2Wdr0Gi09OtXn99+62uWc5rWrj2Z+r09bVo348q2nz2ASd4Q8hBKVIapu8Apt2jB8WXbHDV08XUwv79yTsfaGj5aJErXjmwQscpCnvDsPvz1eYa72dvb8MMPQk0vXnyYy5czLrHOStq2rUbfvvXQarWMG7eS0NDobDmvqShZMi9bt46nX7/6aLVa/vhjP+3b/8bNm49NbZrkLSQ4OILPP19HixYz2b37MtbWVgwd2oTDhyfTo0cds8vpyC60Wi1Tp27jhx+2AjB6dAtmzOhhlsJl+/YLfPyxGAczcmRz+vVrkPmDqJ8Kj8vjQChcGqbvEXktsdHwv/fg6nFRFPLt5hwxdPF1ML+/9NuAZzXo/qlYXvQZjPhVLG9bABcPZbhbs2YV6NChBhqNlsmT16HRaLLBWPjuu854eubj0aNwPvtsTY4sn06Lo6M9M2f2ZPHiwbi7O3HlygPatPmFxYsP5/ifXWIeREfH8/PPu2jQ4EeWLTtGcrKGVq2qsG/fRL79tgu5cxvRzCyHkJysYdKkdakjD774oj1fftnBLIXc3r1XGTVqORqNll696vLll+0zf5DocPiiNdy7BnmLwHQ/yFMI4mJgSnu4fFgIlml7RIfdtwSVNod9G0dERODq6kp4eDi5c+c2tTkZEx8Lw6sKV1+nsaJceuci0Yn3jwuiJ4wBHj8Op0mTaURFxTNrVi/efz97psJevBhEhw6/k5iYzOTJ76VOos7pPHkSzscf/8P+/dcBqFevNDNm9KBs2QImtkySE4mJiWfp0mPMn7+PkJAoAGrWLM5XX3V8a5Nx0xIdHc/Ikcvw87uKSqVi5swevP9+fVObZZAjR27Rv/9C4uOT6NixJvPm9cu8Zyg2WgiXK0fBNR/8fAiKVxDXjykd4ZwfOLoI4VLRyF4xZkRmrt/S82Iq7HPBuJQGdVvmQuPukKcwPLglyt0yoGBBVz77rC0AU6duTf2CU5pq1Yrx/fddAFGyvW/ftWw5r6kpUMCVFSuG8f33XciVy47//vOnZctZ/PTTLuLiEk1tniSHEBOTgI/PAerV+4Hvv99CSEgUJUvmZcGCAWzbNkEKF0RpeOfOc1Lb6fv4DDBb4XLqVCCDBv1JfHwSrVpVYc6c9zMvXBLi4dsuQrg4ucL0f4VwSYiDb7sK4ZLLGX7clSOES2aRnhdTM3Mg+C0TU0D7fQ3fdxdNh2afEKXVBkhKSqZNm1+4evUhffq8w88/9842cydOXMuKFcfJnduBHTs+fqvajgcFhfLFF77s3SuEm6dnPmbO7EmDBjmzg6VEeaKi4lix4jgLFuzn6dNIAIoX92DChFZ061b7reuOmxEXLwYxaNBfPH4cTt68zixZMoRatUqa2iyDXLwYRM+e84mIiKNJk3L8/feHODjYZu4gyUnwfQ84tkm0+J++R8woSkyA77qK6lR7R5G0W7WxIj+HKcjM9VuKF1MT/gyGVBClb4OniQZDB9eAZ3WYe0oM2TL
AqVOBdOoksso3bx6Xbd004+OT6NFjHqdP36Fs2QJs2zbhrWqIpdVq2bbtAl99tYHgYHGx6dzZi8mT36NYMQ8TWyexFIKDI/jrr8MsXXqEiAjRYbtYMSFauneXoiUtO3deYsyYFcTGJlCuXAGWLx9mtv9rp04F0r//QiIi4njnHU9Wrhye+SZ0Gg3MGgh7V4hWGj9sh5rvQlIi/NBTCBr7XPD9dqjRXJGfw1SYlXhRq9WsXbuWdevWsWfPntfaZ+bMmbi5uaXuP3Hi60/EtDjxArBnmfiw2jnAjL0wpYMYb/7Bj/p5FQb45JN/WL36BBUrFmLHjo+xt8+e4VtPnoTTtu2vPH4cTuvWVfjrrw/Msq+CkoSHxzJ9+naWLTuGVqvFzs6aIUOaMG5cS+Mmw0reCvz9g/njj/2sW3eKhATRLbt06fyMHNmcHj3qSNGSBq1Wy9y5e5k+fQdarZamTcvj4zPQbJOVDx++yQcf/EVMTAJ165Zi+fJhmb+x02phzmhRvGFlDV9vhPodhCdmah847CsEzffbwCvn5R2ajXg5e/Ysp0+fRq1Ws2bNGs6cOfPKfWbOnAmQKlj8/PxYt24dPj4+r3VOixQvWq0ogzu3VyjslgNh5gAxq2LBBRHnNEBISBRNm04nNDSa0aNb8OWXHbLN5HPn7tKlyxwSEpL5+OPWfPppm2w7tzlx6dJ9vv9+S2pDO3d3Rz76qDUDBjTAzi7nTXKVZJ7kZA37919jyZIjqYnfALVqlWT06Ba0alX5rRP/ryIiIpYJE1aza9clAAYMaMAPP3TFxsY8xd2//15m+PClxMcn0aRJORYvHoyjo33mDqLVinYZa2eKtv6fr4TmfYRwmdEfDvwjrgnfbIY6OfP71mzEiw5fX1+mTZv2WuLF3d2dwMDAVM8LiGGBr2umRYoXgAe3RfVRQhx8tlR8UE/thEoN4JfDkMGX286dFxkyZAkqlYoNG8bwzjue2WbyP/+cSO1fMHduP7p2NZyjk9PRarXs33+d777bzM2bTwCRtzBmzLv06FE32zxiEvMiLCyaf/45ybJlR7l7NwQQ32Xe3pUYNapFtv6vWhI3bjxiyJAlBAQ8xc7Omh9/7EbfvvXMshQaYPPmc4wdu4KkJA1t2lRlwYIBxv3Pr54KS74UyxMWiqGKycnw0yARQrKxhSkboJ4R5dYWgsVWGwUEBKBWq9MJFx1+fn7Zb1B2UqSMSNgF8PkYBv0gMsmvHhNTqDOgbdtq9OxZF61Wy/jxK4mKynhCdVbTu/c7DBvWFIAJE1Zx4MD1V+yRM1GpVLRoURE/v8+YNasn+fO7cO9eKBMnrqNBgx/5669DxMYmmNpMSTag1Wr57z9/JkxYRa1a3/L991u4ezcEV9dcDB/ejKNHv2Dp0g+lcMmAzZvP0a7dbwQEPKVwYTc2bhzL++/XN1vhsnr1CUaNWk5SkoYuXbzw8RlonHDZNEcvXIb9LISLRgO/fiiEi5U1fLEmRwuXzGJ24sUQbm5uqNXq7DXGFHT/RFQdRYTAxt9hyAyx/q/P4cndDHf7/vsuFC3qzr17oXz99abssTWFKVM60rmzF0lJGj78cAnnzmVsZ07Hxsaa99+vz7Fj/+O77zpTsKArjx6p+eqrjdSr9wPz5+8jMjL7xKUk+3j4UM3vv++hYcOpdO06l7VrTxEXl0jlykX46ade/L+9846rqvzj+JuNyFZEFEWGiltx5MqJs9RK1ByV/nI2NBuamWnT0IZmDshyZBqKWpalgiv3wIkbcIGDeQHZ4/z+eLj3iooCMu6F5/16nde9Z97nuefecz7n+a6QkDnMnj2IevWql3dTdZLMzGxmz97MpEmrSUvLpHPn+mzf/h6tWrmUd9MeiaIofP/9Dt5773cURZQm+OGHkcXzWdqxEpZMFu9HzQafd4VwWThRrDM0go/WQecXS7ILeo9OiZeCsLe3Jz4+/pHrMjIySEpKyjfpLcYm8M5Pwt4ZvBpqeUDTzpCeAgsnFFh52srKnIULR2BgYMC6dUfYvj20zJpsaGjIggXD6dKlAampmYwa9RNhYdFl9vm6iIWFKWPHduXQoY/5+msfnJ3tiIlJ5osv/sLLaw4ffbSRK1fulnczJU9JcnI6gYHHGDHCj3btPsPX9x+uXYulalUzRoxoz5YtU9ix4z1GjGhf9IiTSkR4eDQDBizkp59EdvG33urJ2rUTqFbNspxb9miysnJ4993fNcVq33jjKUoT/LcBvntdvH9pKrwyW1znf3xLJC01NITpv0KXISXYg4pBoce3AgMDCQgIeOJ2M2bMwMvL66ka9SAFCReAuXPn8umnBSd10zsaPQMD34I/F8GiSfDJRpjcHo5vh+Bfoderj9ytQwcPJkzoxrJlu/nggwBat3ahenWrMmmyqakxy5ePYciQJZw+fZPhw5exZctknJxsy+TzdRUzM2NefbUTw4e3Z9OmEBYv3klYWDQrV+5n5cr9dO5cnzFjnqV37yY6WZNF8jBpaZns3HmeP/88yc6dF/IlKmzf3p2XX27H88+3KLqzZiVEURQCAo7y8cebSE3NxM7Ogm+/fZm+fZuVd9MKJCkpjXHjVrJv32UMDQ348svBvPZap+Id7L8N8PVIMcrSbyxM+FYsX/qOiDYyMID3VwqnXclD6JTDbkREBO7u7g855xoYGBAUFIS398OhYRkZGWRkZGjmk5KSqFOnjv457N5PajKMbSyKNQ77UGRX/GWGKLy1/ALYPTo1fXp6Fv37f8/Fi7fp06cpv/zyvzK1FcfF3WPQoB+IiIjB09OJTZvewtbWosw+X9dRFIX9+6+wYsV+duwIJTdX/M5r17bDx6cNgwe3wcOjRjm3UvIg9+6ls3v3RbZtO8uOHedISdFeb9zcHBg0qBU+Pm1wda08CRuflsTENKZP38CWLScB6NTJgx9+GKnTDzyRkQm88oo/ly7dwcLCFD+/1+jZs3HRD5STA6tmwe9zxXzXYSKyyNBQ1LoLzBMx7/4Mff9Xch3QA/Q+2igkJAQ3N61DW6WINnqQg3/CnBeEvXPRUeG4FXZSDB9+vL7A3c6di6J//+/Jysop09pHam7ejGfgwIXcvZtEmzb1+O23CZUqiV1hiYxM4NdfD/Lbb4fyVepu2bIugwe3ZtCgVmU2ciZ5mNu3VezYcY4dO0I5cOCKJicLgLOzHYMGtWLQIC+aNKmls86kusrRoxG89dYaIiMTMDIyZNq0frzxRg+dHn08c+Ymr722nLt3k3B0tGb16nE0a+Zc9AMlJ8DcEXB8m5j3eQ9e/1pc53/5CAK+Fsun+MFz40uuA3qCzokXf39//Pz8HhIvERERBAYG5ktCp05QN368OHGBgYEEBQVV7DwvBfHZYNi/CTyfgbcWw+RnIDdHhMs9xnlr8eKdfPnl35iZGfPnn5Np3rxOGTYazp+/xeDBP5KYmEbr1i6sWTNBJm4rgPT0LLZvD2XjxuPs3n2RnBxRKdzIyJBu3RrSt29zevdugoODFDKlSUZGNsePX+W//y6zZ89Fzp6NzLfezc2B3r2b0r9/c1q3dpGCpR
ikpWUyb96/+PvvRVEUXFyqsXjxK3h56aZTrppNm0J4//0A0tOz8PR04tdfx1G7tl3RD3TtHMwZJIrxmlWBqcuhxwhRw2jpFNiad49760cY+GbJdkJP0BnxohYnAQEBnDhxgmnTptG2bVt8fHwAIWp8fX0JDw/Pt9+8efM0Iy/Hjh3D19e30J9ZocRLbJQwH6UmwZuLIO6WGGq0rwk/nRdmpEeQm5vL6NE/Exx8njp17Pn333ext69apk0/c0b4viQkpNKiRR3Wrp2AnV3ZtkHfiI1N5o8/TrJx43FOn76pWW5gYICXlwu9ezehb99meHjUkDfPpyQ3N5fLl++yb99l9u69xKFD4fnC2dXfeZ8+TenTp6msIv6UHD9+jXfeWUtERAwAQ4e25fPPX9LpUdns7By+/PJv/Pz2ANC9uydLl75avAy/+zaKLOrpKeDoIjLnerSC6BuihtGlo8LHZeICeHFyifZDn9AZ8VIeVCjxAvDXUlj0hsj5suQkzHoOIi9D39fh3eUF7paYmEa/ft9x7Vos3bp58uuv48p8WPbcuSiGDVtKfHwKTZrU5vffJ+psBIGuceXKXbZuPc327aH5hAyAq2t1OneuT8eO9enY0UOOyhSCjIxszpy5ydGjERw5EsHx49dQqVLzbePgYEXXrg3p0qUBXbt6yu+1BEhLy2T+/G34++8hN1ehZk0b5s0bgrd3k/Ju2mOJi7vHpEmrNZmzJ0/25oMP+hX9GpqTA6tnw7ovxXzLHjAzAGyqw4lg+OplkRrDyg4+XFthM+cWFileKpJ4yc2Fd58Vyeo6DIQhH4h5AN9gUU6gAM6di2LAgIWkp2cxdWpvPvigXxk1WsvFi7cZOnQJsbH3aNTIiYCASdKXo4g8zv8CoGHDmnTs6EGnTvXx8nKhZk2bcmqpbpCbm0t4eAxnz0Zy5sxNTp8W0/2RQQBVqpjStm09unXzpEuXhjRq5CRHtEqQ48evMXXqOsLDReqEoUPbMmfOCzrvxH/mzE1ef30FUVEJWFiYsnDhCJ57rkXRD3RPJaKJjv4j5l+aCuPmgYGh8G1ZNUtc3z28RFRpzXol2Q29RIqXiiReQNhK32glqop+shFO7YIti6GmK/idhSoFm2M2bjzO22//BsCqVWPp1avsn3iuXLnL0KFLuHs3ifr1HVm/fhKOjpX7BltckpPTOXgwjIMHr3DgQBjnz996aBtHR2uaN69Dy5Z1adGiDi1a1KmwI15JSWlcuXKXS5fucOnSbc6ciSQ0NCpfRJCaatUsadfOlXbt3HjmGTeaNKktCyGWAipVKl9/vZVffz2Eoig4Olozb97Qcrn2FJUNG44xffoG0tOzcHNz4Oefx9CwoVPRD3T9vAi4iLoiCu6+8xN4jxKCZt6rcPgvsV3f14WPi6nums/KEileKpp4AVg5C9Z+AfZOIvronY4Qc1Oo+YnfPXbXmTM3smLFfqytzfn333fLJaQzIiKGIUOWcPu2Cjc3B9asGS+zjZYAcXH3OHw4nAMHwjhyJJxLl+5oQrDvp0YNKzw8HKlf3xF39xp4eNSgfn1HatWy1fnRhtTUTCIj47lxI44bN+K5di2Wy5fvcOXKXW7fTnzkPubmJjRpUpsWLerQvLkzXl71cHd30Pm+6jOKorB58wnmzPmD2Nh7gP6MtqSkZDBz5kbWrz8GgLd3YxYtGlW8QIMDf8C8VyDtHtSoK/xb6ntB+Gn4fLBw2DUxE0EY/V4v2Y7oOVK8VETxkpkOE5oLJf/8JGFCmtlPOHktOAiNCg6JzszMZvDgxYSEXKNx41ps2TKlXDJ+Xr8ey5AhS4iMTMDeviqrVo2ldet6Zd6OikxqagahoVGcOXOTU6eEuUQ9bP8ozMyMcXKypVYt7eTkZIuTkw329pbY2FTBzs4CGxuLEh2lyM3NJSUlk8TENGJjk4mJEZP6fXR0MlFRCdy8GU9MTPJjj+XkZEP9+o40aFCTpk1r07x5HTw8auhsBeKKSHh4NDNmBGp8RDw8avD110Po2NGjnFv2ZM6di2LixNWEh0djaGjA1Km9mTq1d9Erfefmwq9z4LfPxXyLbjBzPdg6QNBq+GEiZKSBYz2YFQgNKmch28chxUtFFC8Ap3bDtB5CsHy3X4TWBa8Gl8aw+ASYFpzV8/ZtFX36fEts7D0GDWrF4sWjiv7nLAHu3k3k1VeXc/ZsJObmJixaNLJ49mRJoUlOTic8PJorV+4SFhZNWNhdrly5y7VrsWRn5xb6OFWrmmFjU4WqVc0wNTXGzMwYU1NjTE2NMDU1xsTEiNxchZycXHJycsnOziU3N5ecHIW0tEySk9NJScng3r2MR5p1HoeVlTl16thTt241XFyq0aBBTRo0ECNJxYr+kJQIaWmZLFmyi0WLgsnMzMHc3IQpU3oxcWJ3na+mrigKq1Yd4NNP/yQjI5uaNW1YvHgUHToUQ3ClJMLXo+DI32L+xSkwbr4QNMumioy5AG36ioR01vYl15EKhBQvFVW8AHz7P9i+AlyaCIfdiS1AFQ2jPoFXH18m4dChMIYNW0p2di5vvtmDmTMHlFGj85OSksGkSasJDj6PgYEBn3wykPHju8oh/TImKyuH27dV3LqVf7p9W0wqVSqJiWkkJqaVWhtMTIyoXt0KBwdLHBys8t6LqVYtW41g0XWzQ2VDURT++usUX3zxF5GRCQB07dqQuXN99MIcrFKl8v77AfzzzxlAmIm+/3548XzDblwU/i2Rl4Q5aOpP4P0KRN+Ez320YdAjPxHX6XJ4aNQXpHipyOIlKQ5ebwSJMTBylqhC/cVQMDKGb/ZAk8fX2Vi//ijvvLMOgK++Gszo0Z3LoNEPk52dw6xZm1m16gAAo0d35rPPXpBD/TpITk4uSUlpqFSpqFSppKZmkpmZTUZGNpmZ2Xnvc8jOzsHIyAAjIyOMjAwwNjbC0NAAIyNDqlQxwdLSnKpVzbCyMsfS0gxLS3PMzIylaNUzzpy5yezZf3DkSAQATk62fPLJQAYObKkX5/Lw4XAmT/6NyMgETEyMmDnzecaNK+bD06Et4DtKlHRxqCP8Wxq0hpM7RRh0YqwIg57+G7Qr+2hPfUOKl4osXgB2rxMppkEU7jryN+wLFDWQvtkD7i0fu/uCBTuYN+9fDA0NWL58TLkVQlMUBX//vXz22RYURcHbuzFLl75K1aqyqJ1EomtERyfx9df/EBBwFEVRMDc34c03ezBpUg+9qJqdnp6Fr+8/mgy/9epVZ+nSV2jRom7RD5abC2s+gzV5o93Nuwr/FpvqEOALqz7WhkHPCgQn15LtTAVFipeKLl4A/D+AwG9ETYwZa0UV6tD9YOMA3+8H5wYF7qooCtOmree33w5jbm7Chg1vlKvj7Natp3n77d9IT8+iQQNHfvppjMxoKpHoCKmpmfz8838sWhTMvXvCV+nFF7346KPni5cmvxw4ffoGkyev5cqVuwAMH/4Mc+a8ULwMvylJIpro0BYxP+htURE6PUVk0VUvl2HQRUaKl8ogXnJz4ZsxwmHX1Fzkf1n5sSje6FAHvj8ANQquaZSdncOYM
T+zc+cF7O2rsmXLFNzcyq8qbkjINcaOXcHdu0lUrWrGt98OY+DAVuXWHomkspOVlcPvvx/hu++2c/duEiAKh3722Yu0aVOvfBtXSLKycliwYAc//BBMTk4uNWpYMX/+sOLnnAk/BV8Nh5sXhX/LlGXQezREnBG16G6F5YVB/wj9xpZkVyoFUrxUBvECImndpy/Cka1gaQuzNsKiSaJ8gHND+PY/sKtR4O4pKRn4+Czm9Omb1KtXnS1bJpdr9tvo6CTeeONXDh4MA+D1159l1qyBmJrqdtSCRFKRyM3N5a+/TjNv3j9cvRoLQJ069nzwQV9eeql1uUQpFoeLF28zefJvhIZGATBgQEvmzvUpXp239FRhIgr8VhTHre4MszdBw7YQvAYWjs8Lg3YR12EZBl0spHipLOIFxJ/qw16ifEC1WqJuxtwRIoGdRyuYv1v4whRATEwyAwYs4MaNeFq2rEtg4BtYWJSfz0l2dg7z529j0aJgAFq3dmHZstf0ZnhaItFXFEVh795LzJ27VVNVu1o1S955pxejRnXU+dBnNRkZ2SxevJMffggiMzMHOzsLvvrKh0GDijmSe3KXECe38goIdxkCb/4oHhiXTYW/lojlbfrCh2vAulqJ9KMyIsVLZRIvAEnx8F4XuH5O+Lq89wvMeVFEJDXtDF9tB/OCQ03DwqIZNGghCQmpdO5cn5Urx5a7A96OHaFMmbKWxMQ07O2rsnjxK3Tt2rBc2ySRVEQUReHgwTC++247hw6JG7SlpRmTJvVg3LguWFrqj8/G0aMRfPDBeo1vi7d3Y+bPH1q8ciRJ8fDT+yI1BUD12vD2EpEgNPwUfD8OLh/XhkGPnAVGMlryaZDipbKJF4DYKFEyIPoGNGgDE7+HWc+L5Elt+8GcP8CkYEESEnKNl19eRkpKBs8+24CVK1+nSpXyFTDXr8cyfvwqzp6NxMDAgIkTu/HBB/0wNzcp13ZJJBUBRVH477/LfP/9do4evQqAqakRo0d35u23vfWqHlZSUhpfffU3q1cfBKB6dUs+//yl4oVvKwr8twEWvy1yaBkYiKzm/5srcrT8Ogc2LRDmIys7mL4G2vUv8T5VRqR4qYziBeDmJXi3s8gt0Mobhn8Es54TttiuQ0XJ9cc8GRw9GsGIEX6kpmbqjIBJT8/ik082s2bNIQAaNHDkhx9G0rx5wc7IEomkYBRFYc+ei3z33Q5CQq4BokzEiBHtefPNntSqZVuu7Ssq//57hpkzN3Hnjqhz9fLLzzBr1gDs7Irh2xJ9Exa9oc2UW7eRSDrXpBMc+gt+fFOY5EFcUycugGrFKNwoeSRSvFRW8QJw6Rh80F2E7XUdBr1eFdkfs7Og3zh4x088SRTAkSMRjBwpBEyXLg1YsaL8BQzA9u2hTJu2npiYZIyMDJk82ZspU3pJZ16JpJDk5uYSHHyehQuDOHnyBiAKWI4a1YE33uhBzZr6Vek9MjKeTz75g23bzgLg5uaAr+8QOnWqX/SD5eSIFP6/zBAFFY1NYPhMGPahML8vmQwHNottHesJ85FMOlfiSPFSmcULQEiQGHHJzoJBb0GzLiLbY24uDPkAxvo+QcCEM3KkP6mpmXTr5skvv/xPJ0w1cXH3mDlzE1u2nASgadPaLFw4gkaNapVzyyQS3SUjI5tNm0JYtmy3xhfE3NyE117rxMSJ3YrnD1KOpKdnsXTpbhYtCiY9PQtjY0PeeKMHU6b0Kt6D1rVzsGAcnBejuzTuAFOXi4jNLYth5UwhaIyMwec94d/yGB9CSfGR4qWyixeA3b/D1yOE/fbVz8TQ5vfjxLoxX8HwGY/d/fDhcEaN0j0BA7Bly0lmzNhIQkIKJiZGvP9+XyZN6i5LC0gk95GYmMbq1Qf45Zd9mjwtVlbmvPJKRyZM6IaDQ/mlRSgOiqKwY8c55sz5g+vX4wBo396dL798qXgPMJkZ8PtX8Ptc8aBnYQX/+xqenyjyZS2cAFdCxLaNO8AUP1GORVJqSPEixYvgj0ViuBNg8jJIvwf+74v5t5fAgEmP3f3wYTECk5aWSffunvz8s+4ImOjoJKZN28COHaEANGlSm6++GkzbtjINt6RyExWVwPLl/7FmzSFN9W4nJxvGju3KyJHt9bIKd3h4NLNn/8GuXRcAqFnThk8+GcigQa2KV5Po3AH4bqxINgfQfoC4Jla1gVWzRMby3FwRDv2/r6H/OFlQsQyQ4kWKFy0rZ8HaL4SZ6OMN4oli3Zdifvoa6DHisbsfOhTGqFE/kZYmnHiXLx9TvJTapYCiKGzYcIzZs//QVD4eMqQNM2cOoEYNee4llQdFUThyJIIVK/bxzz9nycnJBcDT04lJk7ozaFArvfQPS0nJYOHCIPz89pCVlYOJiRETJnRjypRexauBlpIIP88Q/i0Ado7w5iJ41gcO/ikijGJFjhu6D4cJ34F9zZLrkOSxSPEixYsWRYGFE+EffxEq/cW/cGCTsOUaGokQ6vbPP/YQBw+G8eqrP5GamkmTJrVZs2acTtnJ4+LuMXfuVtatO4KiKFhamvH++30ZM+ZZTEykKUlScUlNzWDz5hOsWLGf8+dvaZZ37lyfSZO6062bp15Uen6Q7GxRmmD+/G3ExCQD0KNHIz799AXc3QvOGv5YDv4pIoni8r6nvq/DuPmQlixEi7omkZMbvL0U2vQugZ5IioIUL1K85CcnB74cCvs3CbvuvN2weQHsXCPqIn35L7To9thDnDlzk1Gj/ImNvYezsx2//TZB54onnjx5nZkzN3HqlIikaNDAkS+/HFy86AOJRIe5di2WVasO8PvvRzSjjlWqmDJ4cGtGj+5M48b66cSuKAq7dl3g88+3cPmycC6uV686c+YMolevJsUTYnG3hfl8X6CYr+UB7/hDs2dh8w+w+hMRnWlkDEOnwYiPwUz/TGsVASlepHh5mMx0mNkPTu8B2xrwzV5YPg0O/wVVLEUZgQZtHnuI69djGTnSn4iIGOzsLFix4nXatXMrm/YXktzcXNatO8pXX/1NQkIKAAMGtGD69OfKtfCkRPK0ZGZmExR0jrVrD7NnzyXUl+569aozenQnhg5th62t/kbBnD0byeefb2H//isA2NlZMHVqH159tWPxTF6KAtt+Fn5+KYlipHnIBzDqE7gWCgvGi0y5IDKRT14G9YpZsFFSIkjxIsXLo0lJgve7ij9sTVfw3QnfvQ6nd4t6HN/+By6NH3uIuLh7vPback6cuI65uQmLF4+iX7/mZdP+IpCQkML8+dtYvfoAubkKRkaGDB3alqlTe+PsbF/ezZNICs2VK3dZu/YwgYHHiYu7p1neo0cjxozpTPfunnpTLPFRREUl4Ov7Dxs3hqAoCqamRrz+ehcmT+6FjU0xR0CunAC/d+HMXjFfvzW8uxxquonQ5y2LhbixsoOx86HPGOmQqwNI8SLFS8Ek3IWpnUSRMbcW8Pnf8NlLIrldtVrw3X5wenzETmpqJm+88Ss7doRiYGDAF1+8xJgxncuoA0Xj3LkofH3/ITj4PAAmJkaMHNmBKVO8dcpvRyK5n5SUDLZsOcW6
dYc5fvyaZrmjozVDh7bj5Zfb4eqq3yOJcXH3+PHHnaxadYD09CwAXnzRiw8/fI46dYr5gHHjojAD/bdBzJtZwGufw4uT4cAfsHSK1uel5ygY/y3YFdOHRlLiSPEixcvjuR0h6iAl3BUJ7GashRm94fp5qOUO3+57Ysrr7OwcPvpooyZt/1tv9WTGjOd01jnw2LGrzJ//r2ZI2tzchNGjO/Pmmz30qoaLpOKSk5PLgQNX2LTpBFu3ntaEORsZGeLt3Zjhw5+hR49Gep/PSKVKxc9vDz/9tJfU1ExA5GuZPXsgLVrULd5B716HNZ9B0EoR4mxgAN1HCOFiYACL34IjW8W2tTxg8lLw8i6ZDklKDClepHh5MuGn4L2ukJoEHV+ANxbC+93gzlWo11T4xFg//ulHURQWLgxi3rx/AejXrxkLFozQmVDqR7F//xV8ff/R1HSpWtWM119/lv/971kZXi0pcxRF4ezZSDZtCuHPP09qkskBuLpWZ/jw9gwZ0qZCjBLeu5fOzz/vY9my3Ron4+bNnZk+vX/xo6IS7sK6r2DrMsgSQogOA4VocagDgd/Cpu8hI1Wk/H95hphMdfcaVZmR4kWKl8JxZi/M6ANZGSJs8OUZorBj/B3wfAZ8g4Uz7xMICDjK9OnryczMwd29Bj//PIYGDXQ3N4I6osHX9x9CQ6MAUU33xRdbM358V1luQFLqXL8ey+bNJ9i0KYSwsGjNcltbCwYMaMFLL7WmXTs3nR3JLAppaZmsXn2QRYuCiY8XTvSenk588EE/+vZtWrw+3lPBhm9E1GS6OCYtusP/voK6jWHzQtj4rXDUBTHCPGWZKLQo0VmkeJHipfDs3wxf+Iih1uEfQbfh8H4XSE6AVj3h861g+uRkUCdPXmfs2JXcvq3CwsKU778fzoABLUu//U+Boihs23aWJUt2a0ZiAJ59tgETJnSjW7eGeu0IKdEtrl6NYevWM/zzzxlNOD8IE2avXk146aXWdO/uqZfJ5B5Famomv/12iCVLdmlGlNzcHHjvvb4MHNgSI6Ni/LfSUkT22/W+QsAANGwrSp406gB/LYYAX0iOF+vqNRWjMB0HPbaem0Q3kOJFipei8e9ybd2jSQvAsz1M7ymeaDq9CB+vFzkQnkBc3D0mTVqt8SuZMKEbM2c+rxc2+pCQa/j772Xr1tPk5oq/hIdHDcaP78rgwW10orK2RL9QFIVLl+7wzz9CsNyfRM7Q0IBOnerz0kut6d+/uU6bWotKcnI6K1fux99/ryY6qnZtO959tw9DhrQp3vUgKxP+/UlkC4+/I5a5NIbRX0LbvrDVX9QpShC5YXBuCK/Mga5DZRSRHqFT4kWlUrF+/Xo2bNhAUFDQE7cPDg7Gz8+PXr164ebmRlBQEG3btsXHx6dQnyfFSzFZ9xWsmCneT18jUmJ/3F9cNLx6wfRfRSrtJ5CdncPXX//DkiW7AOjY0YNly16lenX9KAIXGRnPzz/vY+3awyQnpwMi38QLL3gxbFg7mjVzrhBD+ZLSITc3l9Onb7JtWyhbt54mIiJGs87IyJBOnTzo3785ffs2q3A+VgkJKSxf/h+//LJP49Pi4lKNt97qiY9PW8zMijGilJMjkmn+OgfuXhPLarrCq59ClyGwY5UQNOqU/jVdYdRs6DmyUA9cEt1CZ8TLiRMnOH78OCqVioCAAEJCQp64T2BgIOPGjUOlUuHm5sb06dMZP358oT9TipdioiiwbKqwFRsawmtfCPvw1yMgI00Il2m/QutehTrc1q2neeeddaSkZODkZIO//2hat65Xun0oQZKT0/n99yMsX/4fN2/Ga5Y3auTE0KHtGDy4td4IMknpolKlsnfvJXbtOs+uXRfz5WIxNTWiS5eGPPdcC3r3boKdXdVybGnpEBOTjJ/fHlatOqCJkPLwqMGUKb0YNKhV8UZaFAUObIaVH8MNUYwR+5owYpbIybI3QEQX3bkq1lV3hpF564x1o3ispOjojHhRExgYyNy5cwstXry9vbG1tS3WZ0nx8hTk5oo6SP/+JObb9IWXP4RFb8L1c2LZsOnChlyIC8SVK3d5/fVfCAuLxsTEiJkzn2fs2C565UeSk5PLvn2XCQg4yrZtZ8nIyAbA2NiQHj0aM2xYW3r2bFxh/BQkT0ZRFC5evE1w8Hl27brA8ePXNIUQASwtzejWzZN+/Zrj7d24QpmE7ufq1Rj8/fcSEHBUk6elceNavPNOL/r3b168/7miwIlgWPERXD4ullnZwbAPYcAbIiP4r3Mg8rJYZ+cIw2eKqs8ygkjvkeJFipfioyiwfQX8+KYoKVC9Nry3QhRz/HuZ2KZRe/hw7ROT2YEIj5w6dR1bt54BhBlpwYLhepnlVqVKZcuWkwQEHOXkSa3Dpb19Vfr2bUa/fs3o1Kk+5ubyya+iERWVwMGDYRw4cIV9+65w+7Yq3/r69R3p2bMxPXs2om1b1wotZo8du8qyZbvZti1UU6LAy8uFKVN64e3duPhm1fOHhGg5vUfMm1eFwe+K6dRukXzuWqhYZ11NK2jM9bckgiQ/ei9e4uPjsbe3Jz4+nvDwcHx9fQvcPiMjg4yMDM18UlISderUkeLlabl6Fr4YCjcvipogY74U1Va/HyfCDy2sRbrtLkOeeChFUViz5hBz5vxJWlomlpZmfP75Swwd2lZv/UcuX75DQMBRNm48TnR0smZ51apm9OjRiL59m9GzZyOsrWWBN30kNjZZI1YOHAjL57sCIkKoUycPevZsTI8ejahbt1o5tbRsyMnJZfv2UJYuzR+Z5+3dmAkTutGxo0fx/8sRZ4S/3ZG/xbyJKTz/hkjdcOU4rJwFYSfEuqo24PM+vDhFFJmVVCj0WrxEREQA4OYmCv75+/sTFBTEhg0bHrn9nDlz+PTTTx9aLsVLCZB2T5iRdv0m5tv1h1c/gyVvi6ckgP7jYeL3hXr6uXo1hilT1mrSnffp05T584fqte9IdnYOBw6EsW3bWbZvD+XOnUTNOhMTIzp29KBfv2Z4ezehVi3b8muopEAUReHGjThCQq5z7NhVjh6N4MKF2/m2MTQ0oEWLOnTqVJ9OnerTtq0rFhYVPwItNTWT9euP4u+/l2vXYgHhxzN4cBsmTOj2dPmcoq7A6tmwe52YNzQSPisjZ0FUGKz6WHudqWIJL74jRmGs7J6uUxKdpVTES2BgIAEBAU/cbsaMGXh5eT20b2HFy4OoVCrs7OxISEh4pClJjryUMooC234R6bUz04Vj3Idr4Ph2CPharHdpAjMDClWRNScnl6VLdzN//r9kZeVQrZol8+YN0cnijkXl/kiTbdvOcuXK3Xzr3dwc6NjRg44dPejUqT4ODvor2vSZtLRMzpyJJCTkGsePXyMk5BoxMckPbde4cS06d65Px44etG/vXqlG0W7ciGPVqgOsW3cElSoVEFF3r73WidGjOz9dpNTVUJH1NmgV5OaIZV2HiQiixBhYNUtrOjKrAgPfgqHTwKb603VKovPo9chLYGDgQ2HRBgYGhISEPCSKHoX0eSklIs7AF0OEo5yhEfxvLri3hHm
viNwKpuYwaaFwnCvE8PG5c1FMnvyb5gl36NC2fPbZixXqBhEWFs327WfZtu0sJ0/e0OSPUVO/vmOekPGgQwcPWWOpFEhLy+TixducOxdFaGgUZ89GEhoaRVZWTr7tTEyMaNbMmTZt6tGmTb1KeT5yc3P577/LrFixn+Dg8xp/FheXaowf341hw9piYfHkhJUFHFzUFtq8AE7t0i5v11+YpLOzhGg5vl0sNzGF/hOE6egJddYkFQe9FS/qUZbw8HCN2ehJIy8PIsVLKZKaDAsnaId5n3kOxs0XIdbqi06XIfCOP1jaPvFwGRnZfPPNvyxZshtFUXBysuXzz1+kX79meusLUxCJiWkcORKe50cRxvnzt3jwr+fm5kDz5nVo1syZ5s2dadrUGRubiiPmSpv4+BTOnYvSCJXQ0EjCwqIfEo0ANWpY0aaNK61bC7HSrJlzpXW0TkpKY/36Y6xcuT+fb0+3bp6MGdOZHj0aFS8bLkBKEuxYIbLi3goXywwNoeOLwgRkXlU44h7aItYZGUOf/8GIj6FGnafsmUTf0Dnx4u/vj5+f30PiJSIigsDAQKZNm6ZZNn369HwOuvPmzePYsWMF+rw8iBQvpYyiwD8/wZLJoiaSQx346Hc4fxB+mQE52eBYDz5aJ6KSCsGRIxG8885arl+PA6Br14Z88cVLuLtX3FL1CQkpHD4czoEDYRw8GMbFi7cfuV29etVp1sxZI2jq13ekZk2bCifuCkt2dg43bsQTFnaX8PAYwsLuEhYWTXh4tKZuzoNUq2ZJ06a1adq0Nk2a1KZ163o4O9tV2u9QzcWLt1m16gCBgcc1+VksLc0YNqwdo0d3frr/X1SYECw7VoiHHhAPNP3GiQihW3nr1aLF0BB6jIJRn4jK9pJKic6IF7U4CQgI4MSJE0ybNi1ftlx/f398fX0JDw/X7KNSqfD399fMx8XFPTba6EGkeCkjwk8LM1LUFfG09PrX0KQzzB0uEkcZGcPoL2DIB4VKz52amsmPPwazZMkuMjNzMDExYuLE7kyZ4l38oWo9Ii7uHmfPRnLmzM2818h8yfHup0oVU1xdq+Pm5oCrqwOurtVxdXXAzc2B6tUt9fqmnJubS0xMMpGRCURFJRAVpSIqKoHIyASuXo3h2rXYh0w+9+PiUk0jUoRgccbR0Vqvv5OSJCUlgz//PMm6dYcJCbmuWd6ggSNjxjzL4MGtsbQsZr4URREmoc0LReSQ+tZSxxNemAydX4L9m2DLj3D9vHa/rsNEKv+6nsXvmKRCoDPipTyQ4qUMSU2GBeNhz+9ivv0A4ffyywyRARNEaYFpq0V2zEIQERHDJ59sZtcukVWzVi1b5swZxHPPtah0N6D4+BRCQ4WQOXv2JqGhUdy4EZ8vIdqDWFqa4eRkS40aVjg4WFOjhhU1alhr5h0drXFwsMLKyrzMcpFkZ+egUqUSH59CXNw94uNT7pvuEReXwp07iURFJXDrluqx4gREmLK7ew3c3Wvg4XH/q0OlELpFRVEUTp++ydq1h9m8+YRmlMXY2JDevZsyenRnOnV6ilDnjDSRwv+PH7R5WADa9hMhzU7u8NcS2P6LtspzFUvoPVo449Zp+HQdlFQYpHiR4qXsUBRRFG3pFGFGqlFXRB5dOydCqjPSwLaGKC3QpnchD6mwfXsos2f/oRl9ePbZBnzxxUvUr//k+koVmaysHG7ciOPq1RiuXo0lIiJG8z4yMuEhP5rHYWpqRNWqZlhYmGFpaZb33pSqVcV7Y2NDwEDjf21gIN7f/5qZmU1aWiZpaVmPfE1NzdTUiCosRkaG1KxpQ+3attSubYezsz21a9tSt241PDwcqVXLRq+yNJcXKlUqmzaFsHbt4XxFIV1dqzN8eHuGDm37dFFDMZFClGz101ZxNq+qFSXRN4Rp6OhW7ShM7fpiXe/RUFVenyX5keJFipeyJ+ykSGp3KyzPjOQrqr1+9bJIeAdFKi0AIlLkxx93smTJLjIysjExMWLs2C68/bY3trYyq+aDZGRkc+NGHNHRSdy9m0RMTHLeaxLR0clER4vXhIRH+4aUNnZ2FtjZVaVaNUvs7atib699X6OGNc7OdtSubYejo7VeVCLXRbKzc9i37zKBgcf599+zmrT9ZmbGPPdcC0aMaE+HDu7FH2VRFLhwWJiG9gVqQ50d68Ggt4QJ6NCf8OePIsGlmjZ9hemoTR9Z5VlSIFK8SPFSPqQkwYJxsHe9mO8wECYvhd++gL+XimWez8CMdYUqLaDm2rVYPvlkM8HBwk5ubW3OhAndGTu2S4WtG1OaZGXlkJKS8cCUqXmfmprBvXsZ5OTkah6Y1ZcJRVFQFO2rmZkx5uYmVKliSpUqD76aYm5ugq1tFWxtLaQgKUXOnYsiMPA4mzeH5Mv47OnpxIgR7Rk8uPXTFYXMyhRiZfMCuHRMu7x5V3hhCrg0Fv/x7SsgNUmss7CCXqOFqHFuUPzPllQapHiR4qX8UBRRA2nZO+KC5+gCM9dDzE34fizcUxWptMD9BAefY+7crZrcMHZ2VXnzzR6MHt25UmQ7lUju586dRDZtCmHjxuP5MgLb2VVl0KBWDBnShpYt6z6dr5gqRpiF/loC8XmfYWIK3UeIkRRVtPB1Ofav1jTk3AAGvQ3er0rTkKRISPEixUv5c+UEfDlU5HYwNoGx86DjC/D1SBFWDUUqLaAmNzeXv/46zTffbCM8PBoABwcrJk/2ZtSojpiZVdyCeBJJUlIa27eHsnHjcfbvv6LJYWNqakTv3k0ZPLgN3bt7Pr0zdvhp+GMh7ForfNlAON0//wb0GCHEyp+LtNWdQSScG/Q2tO4tTUOSYiHFixQvukFKInw3Vgw3gxAvU3+CTQvg96+KXFrgfrKzc9i0KYRvv92uceqtVcuWqVN7M3RoO0xMpIlCUjFITk4nKOgcW7acYs+eC2RmaqOx2rVzxcenDc8/3/Lp/cBycuDwX0K0qNPzAzRoI+oKubeEf/zz526xsBJJ5Qa+KZxxJZKnQIoXKV50B0URQ85+7+aZkerBx+uFXdx3FMTfEaUFJi6A58YXqrTA/WRmZhMQcJQFC3Zw+7YIw3RxqcbEid0ZMqSNDJ2V6CX37qUTFHSev/46xe7dF8jIyNasq1/fkUGDWjF4cGtcXEqg3s+NC7DzN9i1Bu7m5X4xNIJnBwvTUEqSGGU59q92H+eG8EKeaUhWd5aUEFK8SPGie1wOEWak2xHCjDTuG+j2MnwzWntRfNZHlBuoWa/Ih09Pz+LXXw+yaFEwsbH3ALC1tWDkyPaMHt2Z2rVlJVqJbnPvXjo7d17gr79OsWvXBU2kEIC7ew0GDmzJwIEtadiwBGr9xESKMh+710L4Ke1yK3thzvV+BU4Ei4RyUVfEOgODPNPQZPDylqYhSYkjxYsUL7pJSiJ8+zrs3yjmO78EU5eL5FU/fyhKCxgaQffhMHQ6uDYt8kekpmawdu1hfv55n6bcgJGRIc8914Lx47vi5eVSkj2SSJ6KW7dUBAWdY/v2UA4evJLPJOTm5sDAgS0ZMKAlnp5OT5+kMSle/Pd2/Q
Zn/9M62BoZi4Ry3UeILLfbfoYdKyFNPARgYX2facjj6dogkTwGKV6keNFdFAW2LBZmpOwskdRu5Czh+7J6NpwI0m7bfgAM+xCadCzyx+Tk5BIUdI7ly//j4MEwzfLWrV0YO7Yr/fs3l34xkjJHURTOn7/F9u2h7NgRypkzkfnWu7k58PzzLRgwoCWNG9d6esGSnir8WHavFSOc2drRHJo+Cz1GQvMucHKnGIlRO9ODNq2/9ysiI65EUspI8SLFi+5z+bhIanfnqph3qANDpwmnwD9+EE6+6p9m02fh5Q/F02ExLuahoVEsX/4ff/wRonmydXKyZfToTgwe3IZatWxLpk8SySNIT8/iyJEIgoLOsWNHKJGRCZp1BgYGtG7tQp8+TenTpxkeHiVQjDQnW4iRXb/Bgc3aERQAt+ZCsLTtD1eOC8Fycqc22ZyBATzzvIga8vIu1v9NIikuUrxI8aIfpKWI6IUN87U5JOwcYfB70KqnSHoVtEr7tOjWXIzEdBkihrqLSExMMqtXH2DVqgMavxgDAwM6d66Pj08b+vdvTtWq0sFX8nQoikJYWDR7915kz55LHDwYls9/xdzchC5dGtKnT1N69WpM9eol4PCqKHDxiBAse9eL/CtqHF2ESajzS3DnGuxZB0e2akOgARq2hW7DoetQqF776dsjkRQDKV6keNEvMtOFjT3ga220g5W9KOrWebBYt3WZ9gmypquoVt17NJhVKfLHZWRk8+efJ/j996McPqytaF6liinPPdecwYPb0LlzfYyMpEOipHAkJqaxb98l9u69xJ49l4iKSsi33tHRmu7dG9GnT1OefbZBySVVvHFB5GLZvVY4w6uxqQ5dhgoxkp4qBMvBP7QhzgB1GwnB0n249GWR6ARSvEjxop9kZ4knx9/napNfWVjBgDeFUPlvg8hBkRgr1tk5ivwTAyZBVZtifeSNG3Fs3BhCYOAxrl6N1SyvWdOGl15qjY9PGzw9SyC6Q1KhSEvL5MSJ6xw6FM7evZc4efK6JmEciKRxzzzjTrdunnTt2pBGjUrA4VZNTKSo5L7rt/yRQuZVRS6l7sPBzEL8X/Zt0P5fQPiYdR8uRItbc2kWkugUUrxI8aLf5OQIn5d1X2qLOppVESGcA9+E49uFqSn6hlhnYS0EzIvviCygxUBRFE6cuE5g4HH+/PMkKlWqZl2jRk707t0Ub+/GtGpVV1Y0roSkpmZw7Ng1Dh8O59ChME6dupEvMgjAw6MG3bp50q2bJ+3bu5dsyQp1pNDutXBmb/5IoTZ9RdbbGnXh4J9C2MTc1O5r4yBGYLoPh0YdZIizRGeR4kWKl4pBbi4c+RvWfqEtBmdiCr3HwOB3hY0/wBeun8tbZwZ9xoDP+1DLvdgfm5GRzc6d5wkMPM7OnefJytLepKpXt6RHj8b07t2ELl0aYGkpC0NWRJKT0zl27CqHDoVx+HA4p0/fJDs7N982jo7WtG/vTufO9ena1RNn5xLOJZSRJiKFdq2FY/88IlJohHBwD9khHG/vr+JsYQ2dXhSCpVXPYvmISSRljRQvUrxULBRFJMxa+4XITwEiH0yPkSIKKSoMAubC+UN56wyFvX/YdHFxfwri41PYtes8QUHn2bPnIsnJ6Zp1pqZGdOjgQa9eTejVqwl16tg/1WdJyofs7BwuXrzDqVPXOXHiBidPXufy5bs8eGmsXduODh3cad/enQ4d3KlXr3rJmYLUpCTCqd0iSujApocjhbqPgGZdREjz7nVwJUS73sRMpBfoPlwkkzOVwlqiX0jxIsVLxeXsPmFOOr5dzBsYiMy8wz8SacwDvs6fxrxNXyFwmnV5avt+ZmY2R49eJSgolKCg81y7Fptvff36jjzzjBvPPONGu3ZuODvblfzNTfJUKIrCrVsqTp7UCpUzZyJJS8t8aFsXl2oaodKhg0fpiNPsLDGCeCJITBePasOWQRsp1KavcM7dsy5/gjlDI/DqJQRLxxdkFWeJXiPFixQvFZ9Lx2DdVyKCQs0zz8OImWBaBdb7wt4AYXoCaNxBhFk/83yJ2PzV4bBBQecIDj7PsWNXycnJb1ZwcrKhXTu3vMkVT08nGcFUhmRl5RAeHs3587c4f/4WFy7c4ty5KKKjkx/a1tLSjJYt69KqlQutWtXFy8uFGjVK4fqhKHDzklasnNmTPwIIRIHDNn3Eb1UVA3t/F2I9R1vfiKadhdNtlyFg61Dy7ZRIygEpXqR4qTxcPSuik+4XKq16woiPReK7wG9g+wptTguXxqL0QPfhosZSCZGQkMLhwxEcPRrBsWNXOXPmYR8Ja2tz2rRxpW1bV5o2rY2npxO1atnK0ZkSICYmWSNQhEi5xZUrd/P5K6kxMjLE09MJLy8hVry8XPDwqFF6jtiqGDgZLMRKSBDE5s+qi3U18Zv16gX1msGtMOHrcniL8HtR495SjMJ0GyaccyWSCoYUL1K8VD4iLwuTUfCv2ifUJp1g+Ext1t6/lohq1iAu/j7vQ9/XwdyixJuTmprByZM3OHr0KkeOhBMScp2UlIyHtrO2NsfT04lGjWrh6emU994Ja+ui56+p6CQlpXH1agwREbFcvRqT9z6Gq1dj80WH3Y+lpRmNGtWiceNaNGokvucmTWqXbCTQg2SkQeh+7ejK/eHMIJzOm3QWYqVxBzHycmqX2PZaaP5ta9fXhjbX9Sy9NkskOoAUL1K8VF7uXof180RxOfVoi4eXMCe16A5b/WDT99oMpDbVRZXcHiPAya3U8l5kZ+dw/vwtjh69SkjINS5evE1YWPRDpiY1tWvb0aiRE+7uNahd2w5nZzucne1xdrbDxqZKhRytuXcvndu3E7l9W6V5vXYtjoiIaK5ejSUu7l6B+xoYGODqWj1PpAih0rhxLZyd7Uo/tD03FyJOa0dWQvflz14L4NZCiJWW3YVj7bkDwgn9wqH85iADA3BvBa17wbNDoL6XzMUiqTRI8SLFiyTuNmz8Fv5eBukpYplLY3j5I+g4CHauESJHXVsJxGhMi+7Qsod4rVGnVJuYkZFNWNhdLl68zYULtzWvt2+rHrufpaWZRsiohU3t2nbY21fF1tYCOzvxamlpVq4iR1EUUlIyUKnSSExMRaVKJTExjdjYZO7cSeTWrUTu3BFC5c6dxHyRXAXh4GCFq2t1XF0dcHNz0Lx3da2OhUUZlnaIvqkdWTm5ExJj8q+vXluIFa9eUK2WMG+eDIbTe7Sjf2oc64ntWvcSvzub6mXVC4lEp5DiRYoXiZrEWNi8UJiN1DeNWu7Cebf7CDj0J/y19OEnYIBaHloh07K7yOhbBqhUqVy6JITM9etxREbGExmZQGRkwmNHHx7EyMgQG5sq2Npa5JusrMwxNjbE2NgIU1NjjI0NMTExypsXr+r53FyFrKxsMjOzyczMITMzm6ysnLx58T4jI5uMjGySkoRISUgQIiUxMfUhv58nYWVlTs2aNjg52VCzpi0uLtVwda2eJ1QcsLIqp/DflCThXKseXYm8lH99FUto3k2IENdmwq/l5E4hWGKj8m9rZS98XFp5i+KHTm5l1QuJR
KeR4kWKF8mDpCTClsWw8TtIihPLqlhC867iJtKoA6So4PRu4X9wJUTrAKzGpXGemOkh9rMu+7wuqamZREUlEBWVkE/U3LqlQqVK1Uz3FwIsb0xMjLC1tcDGpgo2NhbY21elVi3b+0SKDU5Otjg52ehO0r+cbBG2rB5duXA4fwizoSE0bCfESqMOoj7X2b3CFKROmqjGxEwklfPyFr8195ZgZFSm3ZFI9AEpXqR4kRREWkqe38t3Dz8R2zlCy57iJtOgrTApqcVMxOn82xoYiJuQ2szU9FmdyrGRlpZJYmJaPkGjnu7dyyA7O4esLO0k5nPzXrM17w0NDTE1FSMxpqbGee+N73uvXm6sESe2tupX8b5KFVPd9tFJiIZrZ4Wz7NW812uhWnOjmloeWr8VMwu4fFyMrFw4/LDfioeXVqw06VSsAqISSWVDihcpXiRPIjcXrp4RT8ong0Xir/vDUgGcG2jFjGtzIWBO7RKC5saF/NsaGkGDNlox06RTqUQxSZ6CtHtw7VyeODmrFSpq5+0HUZt3vHoJH5aoMPFbeVRuFie3PDNQnrixrlbq3ZFIKhpSvEjxIikqmRlw8bBWzFw6mt9sZGgI9VuLG1SrnlDTTWyvFjO3wvMfz9gEPNsLIdOyu3hvWoYOpZWZ7CwROq8ZRckTKvc7Z9+PgYEQHy5Nhb+KazMhXGIj80KYgyH+dv591LlZ1L8H6bcikTw1UrxI8SJ5WlISRWSI2unywZEWU/O8XB15pgHraqLa76ldYnowEZmpuRiNUTsAN2hToknyKiWKIiqLXz2bX6jcvJi/iOH92DmKRHD18oRKLXfAQAgbtbno+rn8VZlBnL8H/VZkdWaJpETRKfEyb948AMLDxZOpn59fofaxtbUFQKVSMW3atEJ/nhQvklIhNkorZB71JG5lL0SJV96TOIgCe2qfmQdNE1UsRb0ltZnJrYV04nwcibEP+KTkvT5ovlFTxVIIFLVQqeMpksMl3NWKlGuhcDtcWyfofgwMtCNtXr2gSUdZ6FAiKWV0RrxMnz4dX19fzfyECROIiIggKCiowH3UYkctWIKDg9mwYUOhRA9I8SIpAxRFPN2rTUyndz98E3V00ZoUWvYQEU7qUZkzeyA5If/2puZgWwOsq4taNTb3TbaPeF/VpuIkL1MU4W+UohIjXskJIhT5fqESf+fR+xqbCGGiFioujcG8qjgf189pRUrkpYdD4dVYVxOjMPWa5pmOmor3VW1KrcsSieRhdEK8qFQqhgwZwoYNGzSjKCdOnKB169aEh4fj5vZoG7GdnR1Xr17V7AMie2ZhmynFi6TMyckWhSLVYubCoYfNFm7N88RMXvTJ7fC8kZldwlm4oBGEgjA2ebTQKUjsWNmXnpkjM0MrPO6f7uUtS31gXjPdN1+QsLifmq5akVGvKVS1hexM4d+iNvfcOP+w47UaCyutQFEfo14TIRorihCUSPQYnREvrq6u7Ny5Ey8vL80yOzs7QkJCNMvuJyIiAnd394eEioGBAUFBQXh7ez/xc6V4kZQ7aSkiRbzaxPRgmLWRscgN4uUtTBPW1USeEANDkUgvMUZMqhjt+/vnHwzhLQyGhmBVreBRHbUQMjB8WFg8KDxSH5h/MBV+cTE0BAtrMeLh5K4VKnY1QckVpR/uN/k8mKlWjak51G2cX6DUayoKdUqRIpHoLEW5fxuXViNsbW1JSMg/NB4cHAxQ4KhLREREgcdSqVSPXJeRkUFGhvbimZRUwAVNIikrqlSFtn3FBCKPyKldcGqnSHh297oQN6H7Ht7Xwlo4ldrW0L42aCte1cssrMWNPjvr8SJHPd1Ticgp9XxpYWEFFjZgaSsEiGZ6YP7+9eaWeSNCBiIJXIoKbl4S4iTiDOxaW3CbjYzBuaEQJ+rRFNemIhJM+g9JJBWaUhMvj2Lu3Ln4+fnlMwkVBnt7e+Lj4ws85qeffloCrZNISgm7GtD9ZTEpCtyOECMyp3bCrTDhzKuKFmIkNUlMUVeefFwTU7CpkV/o2DkKHxBb9XwNYTLCAO4lFCx2kmLF+9zcB8SH7aOFSJU80WFgKEYzcnPFiFBKorYPqUnaedVdkWI/NTHvNW8qzCjS/aHM9e6bnBuI70AikVQ6Ci1eAgMDCQgIeOJ2M2bMeKRJaPr06QwbNozx48cXrYVQoHBRf967776rmU9KSqJOndItqCeRFBsDAxGeW8sdnp+gXa4oYoQk4W6emLmrFTXqZfevS02GrEwRkv1gWHZBWFfLL3RsHaFabZENtooVpCU/LDqS4oTYelCQFMd09TjMqogRJQtrYTK6X6TUbSQT/kkkknyUSZ6XwMBA4uPjnyhcpM+LRFJIMtK04qYggaOeT4p9uE5TSWFqrhUdan8VC2tRKkH9/sF16vUWNuK1ipUcQZFIJLrhsKsmODgYlUqFj48PIJx24+PjHxttFBISkm+9jDaSSJ6CnBxIjn+0sFGLn7TkR4uMJwkSKTokEkkJoRMOuyBCo0+cOIGPj4/GGTcwMFAzAhMREUFgYGC+JHQzZswgODhYs83920skkmJgZCQiiWwdyrslEolEUiKUeqj0o6KE1B/p7++Pr6+vJvuumnnz5mlGXo4dO5Yv0d2TkCMvEolEIpHoHzplNiprpHiRSCQSiUT/KMr9W1YWk0gkEolEoldI8SKRSCQSiUSvkOJFIpFIJBKJXiHFi0QikUgkEr1CiheJRCKRSCR6hRQvEolEIpFI9AopXiQSiUQikegVUrxIJBKJRCLRK6R4kUgkEolEoldI8SKRSCQSiUSvkOJFIpFIJBKJXiHFi0QikUgkEr3CuLwbUNKo60wmJSWVc0skEolEIpEUFvV9uzD1oiuceElOTgagTp065dwSiUQikUgkRSU5ORkbG5vHbmOgFEbi6BG5ubncunULKysrDAwMSuSYSUlJ1KlTh5s3bz6xTLe+Ivuo/1T0/oHsY0WhovexovcPSqePiqKQnJxMrVq1MDR8vFdLhRt5MTQ0xNnZuVSObW1tXWF/iGpkH/Wfit4/kH2sKFT0Plb0/kHJ9/FJIy5qpMOuRCKRSCQSvUKKF4lEIpFIJHqFFC+FwMzMjNmzZ2NmZlbeTSk1ZB/1n4reP5B9rChU9D5W9P5B+fexwjnsSiQSiUQiqdjIkReJRCKRSCR6hRQvEolEIpFI9AopXiQSiUQikegVFS7PS0kzb948AMLDwwHw8/Mr1D62trYAqFQqpk2bVmrte1pUKhXr169nw4YNBAUFPXH74OBg/Pz86NWrF25ubgQFBdG2bVt8fHzKoLXFo6h9BP06h1D09uryeSzOd69P56sinatHURn+b5Xhugk6fv9TJAUybdq0fPPjx49XvL29H7uPr6+v4uvrq5kPCgpSxo8fXyrte1pCQkIUPz8/xdfXV/Hy8irUPhs2bFBsbW0VQHFzc1P8/PxKuZVPR3H6qE/nUFGK115dPY/F6Ys+na+KdK4eRWX4v1WG66ai6P79T4qXAkhISFC8vb2VhIQEzbKQkBAFUMLDwwvcz9bWNt8+iqIouq4RN2zYUKQ/4YP9
0weK0kd9O4fFaa+unsfi9EWfzldFOlePoyL/39RU5OumPtz/pM/LYzh+/DgRERGaeTc3N0AMhT2KiIgIVCqVZsjsfoKDg0ujiZISRt/Oob6193EUpy/61H99amtZIb8T3UXX73/S56UAbG1tSUhIyLdMfQLUJ/FB7j/RDx6roBOuj6xfvx57e3vi4+MJDw/H19e3vJtUYujbOXya9uraeSxOX/TpfFWkc1VS6NP5e1r06Rzqw/1PipciMHfuXPz8/B6pLB+H+gdbEfDy8gK0P2B/f3+GDBnChg0byrNZpY6+ncMntVefzmNxvnt9Ol8V6VyVFPp0/gpDRTiHunb/qzTiJTAwkICAgCduN2PGDM0P7X6mT5/OsGHDGD9+fJE/uyz+hE/bv8LyoOoeOnQoEyZMKHC4sCQpqz4+irK6kJZUH5/U3vI8j0WlON+9Pt34KtK5Kin06fwVBn0/h7p4/6s04sXHx6fYYWmBgYG4u7s/8cQVNJymUqkKXFdSPE3/ikJgYGC+z1H/8SIiIkpcMDxIWfSxPM8hFL2PxW1veZ7HgihOX8r7fBWFinSuSgp9On9Pgz6fQ529/5WKG3AFIigoSNmwYYNmPiEh4Yne1g+u1/WvubBe8wkJCQ95m6uX6bonfVGjH/TpHBa1vbp8Hovz3evT+apI5+pxVOT/m5rKcN3U5fufjDZ6DCdOnODEiRN4eXkRERFBREQE/v7+2NvbA0I1q5P4qJkxY0Y+z+rAwMBiDbWVJQUN6z3YP1tbW6ZNm5ZPRfv7++Pj46PzQ5+F7SPo3zl8Unv16TwWtS+F2UeXqEjn6nFU5P+bmop+3dT1+5+sKl0AKpUKV1fXR3pJq78yf39/fH19NdkH1cybN0/zQz127JjOepVHRERofCxOnDjBtGnT8mV9fFT/VCoV/v7+mvm4uDid7R8Ur4+gP+dQzePaq2/nsah9edI+ukZFOlcPUhn+b5XhuqkP9z8pXiQSiUQikegV0mwkkUgkEolEr5DiRSKRSCQSiV4hxYtEIpFIJBK9QooXiUQikUgkeoUULxKJRCKRSPQKKV4kEolEIpHoFVK8SCQSiUQi0SukeJFIJBKJRKJXSPEikUgkEolEr5DiRSKRSCQSiV4hxYtEIpFIJBK9QooXiUQikUgkesX/AVlnn2iRFEdBAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "t, x_stacked = prepare_VdP_data()\n", + "t_max = 320\n", + "x, y = x_stacked[:, 0], x_stacked[:, 1]\n", + "t_train, t_test = t[:t_max], t[:t_max]\n", + "x_train, x_test = x[:t_max], x[t_max:]\n", + "y_train, y_test = y[:t_max], y[t_max:]\n", + "plt.plot(x_train, y_train, color = 'midnightblue')\n", + "plt.plot(x_test[::3], y_test[::3], color = 'orangered')\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ea55038c", + "metadata": {}, + "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 85-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 86-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 87-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 88-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 89-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th 
weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 90-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 91-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 92-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 93-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 94-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 95-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - 
"During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 96-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 97-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 98-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", - "Multiobjective optimization : 99-th epoch.\n", - "During MO : processing 0-th weight.\n", - "During MO : processing 1-th weight.\n", - "During MO : processing 2-th weight.\n", - "During MO : processing 3-th weight.\n", - "During MO : processing 4-th weight.\n", - "During MO : processing 5-th weight.\n", - "During MO : processing 6-th weight.\n", - "During MO : processing 7-th weight.\n", - "During MO : processing 8-th weight.\n", - "During MO : processing 9-th weight.\n", - "During MO : processing 10-th weight.\n", - "During MO : processing 11-th weight.\n", + "setting builder with \n", + "setting builder with \n", + "Deriv orders after definition [[0], [0, 0], [0, 0, 0]]\n", + "320\n", + "initial_shape (320,) derivs_tensor.shape (320, 3)\n", + "self.tokens is ['du/dx0', 'd^2u/dx0^2', 'd^3u/dx0^3']\n", + "Here, derivs order is {'du/dx0': [0], 'd^2u/dx0^2': [0, 0], 'd^3u/dx0^3': [0, 0, 0]}\n", + "self.tokens is ['u']\n", + "Here, derivs order is {'u': [None]}\n", + "The cardinality of defined token pool is [1 3]\n", + "Among them, the pool contains [1 3]\n", + "self.vars_demand_equation {'u'}\n", + "Creating new equation, sparsity value [3.76342975e-06]\n", + "New solution accepted, confirmed 1/12 solutions.\n", + "Creating new equation, sparsity value [3.53590374e-06]\n", + "New solution accepted, confirmed 2/12 solutions.\n", + "Creating new equation, sparsity value [5.89895174e-10]\n", + "New solution accepted, confirmed 3/12 solutions.\n", + "Creating new equation, sparsity value [0.00031145]\n", + "New solution accepted, confirmed 4/12 solutions.\n", + "Creating new equation, sparsity value [0.0088946]\n", + "New solution accepted, confirmed 5/12 solutions.\n", + "Creating new equation, sparsity value [0.02682798]\n", + "New 
solution accepted, confirmed 6/12 solutions.\n", + "Creating new equation, sparsity value [9.52842834e-10]\n", + "New solution accepted, confirmed 7/12 solutions.\n", + "Creating new equation, sparsity value [3.07396998e-06]\n", + "New solution accepted, confirmed 8/12 solutions.\n", + "Creating new equation, sparsity value [6.90134673e-07]\n", + "New solution accepted, confirmed 9/12 solutions.\n", + "Creating new equation, sparsity value [9.3816947e-11]\n", + "New solution accepted, confirmed 10/12 solutions.\n", + "Creating new equation, sparsity value [9.03830766e-08]\n", + "New solution accepted, confirmed 11/12 solutions.\n", + "Creating new equation, sparsity value [1.83566666e-09]\n", + "New solution accepted, confirmed 12/12 solutions.\n", + "[0.06, 0.94] [[0.86, 0.14], [0.8200000000000001, 0.17999999999999994], [0.08, 0.92], [0.2, 0.8], [0.6, 0.4], [0.88, 0.12], [0.0, 1.0], [0.06, 0.94], [0.92, 0.07999999999999996]]\n", + "[0.88, 0.12] [[0.86, 0.14], [0.8200000000000001, 0.17999999999999994], [0.08, 0.92], [0.2, 0.8], [0.6, 0.4], [0.88, 0.12], [0.0, 1.0], [0.06, 0.94], [0.92, 0.07999999999999996], [0.76, 0.24], [0.16, 0.84]]\n", + "[0.06, 0.94] [[0.86, 0.14], [0.8200000000000001, 0.17999999999999994], [0.08, 0.92], [0.2, 0.8], [0.6, 0.4], [0.88, 0.12], [0.0, 1.0], [0.06, 0.94], [0.92, 0.07999999999999996], [0.76, 0.24], [0.16, 0.84]]\n", + "[0.2, 0.8] [[0.86, 0.14], [0.8200000000000001, 0.17999999999999994], [0.08, 0.92], [0.2, 0.8], [0.6, 0.4], [0.88, 0.12], [0.0, 1.0], [0.06, 0.94], [0.92, 0.07999999999999996], [0.76, 0.24], [0.16, 0.84]]\n", + "best_obj 2\n", "The optimization has been conducted.\n", "\n", "\n", "0-th non-dominated level\n", "\n", "\n", - "0.0 * u{power: 2.0} + -1.0110582770581997 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * du/dx0{power: 2.0} * u{power: 1.0} + 0.0 * u{power: 2.0} * du/dx0{power: 1.0} + 0.021952156101475665 = d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.05974208487282046}} , with objective function values of [2.097144 2.5 ] \n", + "0.19913638909280368 * du/dx0{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} + -0.19907157227830585 * u{power: 2.0} * du/dx0{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * d^2u/dx0^2{power: 1.0} + -1.0004250731895399 * u{power: 1.0} + -1.2637814584238564e-05 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0032438343913190376}} , with objective function values of [2.91261232e-03 5.00000000e+00] \n", "\n", - "-1.0047079868134445 * u{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -0.18089255641524984 * u{power: 2.0} * du/dx0{power: 1.0} + 0.19230730762704124 * du/dx0{power: 1.0} + 0.0 * u{power: 2.0} + 0.011183282281263822 = d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.008613160064087704}} , with objective function values of [0.74193618 5. 
] \n", + "0.0 * d^3u/dx0^3{power: 1.0} + -1.0134740998571992 * u{power: 2.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * d^3u/dx0^3{power: 1.0} * u{power: 1.0} + 0.007105582081027101 = d^2u/dx0^2{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0048261417898382}} , with objective function values of [1.62371108 3.5 ] \n", "\n", - "0.0 * u{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} + 0.0 * du/dx0{power: 1.0} + -1.0197061933962375 * u{power: 2.0} + 0.0 * u{power: 2.0} * du/dx0{power: 1.0} + 0.010457764456203493 = d^2u/dx0^2{power: 1.0} * u{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03667289193472477}} , with objective function values of [1.67064687 3.5 ] \n", + "0.19915357895128105 * du/dx0{power: 1.0} + -1.000451212887634 * u{power: 1.0} + -0.19984819414063573 * du/dx0{power: 1.0} * u{power: 2.0} + -0.0007471974756737762 * u{power: 1.0} * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * u{power: 2.0} + -1.7100187981240053e-05 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0044267036074499755}} , with objective function values of [2.71230147e-03 1.00000000e+01] \n", "\n", - "0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.19230730762704118 * du/dx0{power: 1.0} + -1.0047079868134439 * u{power: 1.0} + -0.18089255641524948 * u{power: 2.0} * du/dx0{power: 1.0} + 0.0 * u{power: 2.0} + 0.011183282281263557 = d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.021488562446729875}} , with objective function values of [0.74193618 5. 
] \n", + "-1.0074402446941904 * u{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * u{power: 2.0} + 0.0 * d^3u/dx0^3{power: 1.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.010341093015417396 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.013749302597954233}} , with objective function values of [2.02312901 2.5 ] \n", "\n", - "0.0 * du/dx0{power: 2.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 2.0} + 0.0 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + -1.0110582770581997 * u{power: 1.0} + 0.021952156101475665 = d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0283864611443392}} , with objective function values of [2.097144 2.5 ] \n", + "0.0 * du/dx0{power: 1.0} + -1.0125966798604864 * u{power: 2.0} + -0.00805907452711123 * u{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * d^3u/dx0^3{power: 1.0} * u{power: 2.0} + 0.005780766486098398 = d^2u/dx0^2{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.008085168052807509}} , with objective function values of [1.62114862 4. ] \n", "\n", - "0.0 * du/dx0{power: 2.0} + 0.0 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + -1.0003527793361071 * u{power: 1.0} + 0.187412393154041 * du/dx0{power: 1.0} + 0.17107497251970538 * du/dx0{power: 1.0} * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.010208938915226819 = d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03748517114027669}} , with objective function values of [0.73759843 7. ] \n", + "0.0 * d^2u/dx0^2{power: 1.0} * d^3u/dx0^3{power: 1.0} + -1.0134740998571992 * u{power: 2.0} + 0.0 * d^3u/dx0^3{power: 1.0} * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.007105582081027101 = u{power: 1.0} * d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.008685748472871128}} , with objective function values of [1.62371108 3.5 ] \n", "\n", - "-1.0047079868134445 * u{power: 1.0} + -0.18089255641524984 * u{power: 2.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 2.0} + 0.19230730762704124 * du/dx0{power: 1.0} + 0.0 * d^2u/dx0^2{power: 2.0} + 0.011183282281263822 = d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.015830397552355376}} , with objective function values of [0.74193618 5. 
] \n", + "0.0 * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} * d^3u/dx0^3{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * d^2u/dx0^2{power: 1.0} * u{power: 2.0} + 0.0 * d^3u/dx0^3{power: 1.0} + -1.0134740998571992 * u{power: 2.0} + 0.007105582081027101 = d^2u/dx0^2{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.00464936251106441}} , with objective function values of [1.62371108 3.5 ] \n", "\n", - "0.0 * du/dx0{power: 2.0} + -1.0110582770581997 * u{power: 1.0} + 0.0 * d^2u/dx0^2{power: 2.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.021952156101475665 = d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.05365862114145223}} , with objective function values of [2.097144 2.5 ] \n", + "0.0 * d^3u/dx0^3{power: 1.0} + -1.0074402446941904 * u{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.010341093015417396 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.020815923181181993}} , with objective function values of [2.02312901 2.5 ] \n", "\n", - "-1.0047079868134445 * u{power: 1.0} + 0.0 * du/dx0{power: 2.0} + 0.0 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.19230730762704154 * du/dx0{power: 1.0} + -0.18089255641524976 * u{power: 2.0} * du/dx0{power: 1.0} + 0.011183282281263975 = d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.014178070427050249}} , with objective function values of [0.74193618 5. 
] \n", - "\n", - "0.0 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 2.0} * u{power: 1.0} + -1.0110582770581997 * u{power: 1.0} + 0.0 * du/dx0{power: 2.0} + 0.0 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.021952156101475665 = d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03574021833661281}} , with objective function values of [2.097144 2.5 ] \n", - "\n", - "0.0 * u{power: 1.0} * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * u{power: 1.0} * du/dx0{power: 2.0} + -1.0197061933962375 * u{power: 2.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} * u{power: 2.0} + 0.010457764456203493 = u{power: 1.0} * d^2u/dx0^2{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.13917917271388758}} , with objective function values of [1.67064687 3.5 ] \n", - "\n", - "-1.0197061933962375 * u{power: 2.0} + 0.0 * d^2u/dx0^2{power: 2.0} * du/dx0{power: 1.0} + 0.0 * u{power: 1.0} + 0.0 * u{power: 2.0} * du/dx0{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} + 0.010457764456203493 = d^2u/dx0^2{power: 1.0} * u{power: 1.0}\n", - "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.05927480366474771}} , with objective function values of [1.67064687 3.5 ] \n", + "-1.0074402446941904 * u{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * u{power: 1.0} * du/dx0{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * d^3u/dx0^3{power: 1.0} * du/dx0{power: 1.0} + 0.010341093015417396 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.007589181812725444}} , with objective function values of [2.02312901 2.5 ] \n", "\n" ] } ], "source": [ - "epde_search_obj = epde_discovery_as_ode(t_train, x_train, True)" - ] - }, - { - "cell_type": "code", - "execution_count": 130, - "id": "347125f0", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\"0.0 * u{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * u{power: 1.0} * du/dx0{power: 2.0} + -1.0072168825573178 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + 0.0 * u{power: 2.0} * d^2u/dx0^2{power: 1.0} + 0.010336798407375216 = d^2u/dx0^2{power: 1.0}\\n{'terms_number': {'optimizable': False, 'value': 6}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2, 3], 'probas': [0.4, 0.3, 0.3]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.24369256577585763}}\"" - ] - }, - "execution_count": 130, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "sys.text_form" + "epde_search_obj = epde_discovery_as_ode(t_train, x_train, False)" ] }, { "cell_type": "code", - "execution_count": 131, - "id": "ec960001", + "execution_count": null, + "id": "b9c8d97a", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "$\\begin{eqnarray*} 
\\frac{\\partial ^2u}{\\partial x_0^2} = -1.011u + 2.195\\cdot 10^{-2} \\end{eqnarray*}$\n", - "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} = -1.005u + -1.809\\cdot 10^{-1} \\left(u\\right)^{2.0} \\cdot \\frac{\\partial u}{\\partial x_0} + 1.923\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} + 1.118\\cdot 10^{-2} \\end{eqnarray*}$\n", - "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} \\cdot u = -1.02\\left(u\\right)^{2.0} + 1.046\\cdot 10^{-2} \\end{eqnarray*}$\n", - "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} = -1.0u + 1.874\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} + 1.711\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} \\cdot u \\cdot \\frac{\\partial ^2u}{\\partial x_0^2} + 1.021\\cdot 10^{-2} \\end{eqnarray*}$\n" + "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} = 1.991\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} + -1.991\\cdot 10^{-1} \\left(u\\right)^{2.0} \\cdot \\frac{\\partial u}{\\partial x_0} + -1.0u + -1.264\\cdot 10^{-5} \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} \\cdot u = -1.013\\left(u\\right)^{2.0} + 7.106\\cdot 10^{-3} \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} = 1.992\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} + -1.0u + -1.998\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} \\cdot \\left(u\\right)^{2.0} + -7.472\\cdot 10^{-4} u \\cdot \\frac{\\partial u}{\\partial x_0} \\cdot \\frac{\\partial ^2u}{\\partial x_0^2} + -1.71\\cdot 10^{-5} \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} = -1.007u + 1.034\\cdot 10^{-2} \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} \\cdot u = -1.013\\left(u\\right)^{2.0} + -8.059\\cdot 10^{-3} u + 5.781\\cdot 10^{-3} \\end{eqnarray*}$\n" ] }, { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAr0AAAGyCAYAAADpmnyQAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAABQqUlEQVR4nO3dT4wj553f/09LOxhYsqare3zJwsJqqlfXIGH3+JQsAk9RBibReNcmuwEvdAksElkE2GCB7VL7EuvUIncPu0CSNdlOLkKcdLPkaLXJABZLDpDsJZ4m18EeBdYI2MS5eMjqtkeLUWNVv0P/qkw2yeafLnaRxfcLGEisqiafevjUw2899fxZCYIgEAAAAJBizyWdAAAAAGDWCHoBAACQegS9AAAASD2CXgAAAKQeQS8AAABSj6AXAAAAqUfQCwAAgNQj6AUAAEDq/VrSCbiKzz//XD/72c/00ksvaWVlJenkAACAMQRBoF/84hf69V//dT33HO1vuB4LHfT+7Gc/08svv5x0MgAAwBT+5m/+Rl/+8peTTgaWxEIHvS+99JKk84vm1q1bCacmGWdnZ/rwww/12muv6caNG0knZ+GRn/EjT+NHnsaL/IzfqDw9PT3Vyy+/HP2OA9dhoYPesEvDrVu3ljrofeGFF3Tr1i0q6xiQn/EjT+NHnsaL/IzfuHlK10RcJzrSAAAAIPUIegEAAJB6BL0AAABIPYJeAAAApB5BLwAAAFKPoBcAAACpR9ALAACA1CPoBQAAQOoR9AIAACD1CHoBAACQegS9AAAASD2CXgAAAKTeryWdgGq1Kt/3ZRiGWq2W9vb2ZBhG0skCAABAiiQa9JbLZRUKhSjI9X1fb775pmq1WpLJAgAAQMok2r2hXq/3tOoahiHf9xNLDwAAANIp0aDXMAxls9ko0PU8T6ZpJpkkAAAApFCi3RsODg60ubmptbU17e7uamNjQ5VKZejxz54907Nnz6LXp6enkqSzszOdnZ3NPL3zKDzvZT3/uJGf8SNP40eexov8jN+oPCWvkYSVIAiCJBNQrVZVr9flOI4sy1KtVhs6kO273/2u3n777b7tP/jBD/TCCy/MOKUAACAOn376qb71rW/p5OREt27dSjo5WBKJBr22bSubzcqyLHmep3w+L9/31Wq1Bh4/qKX35Zdf1s9//vOlvWjOzs5Ur9eVzWZ148aNpJOz8MjP+JGn8SNP40V+xm9Unp6enupLX/oSQS+uVWLdGzzPk+/7sixLkmSaphqNhjY3N+U4jnK5XN/f3Lx5Uzdv3uzbfuPGjaWvqMiDeJGf8SNP40eexov8jN+wPCWfkYTEBrJ5njewG0OxWLz+xAAAACDVEgt6LctSs9nsm6Ks0WgMbOUFAAAAppXo7A21Wk37+/u6fft2NEdvqVRKMkkAAABIoUSDXsMwCHIBAAAwc4kuTgEAAABcB4JeAAAApB5BLwAAAFKPoBcAAACpR9ALAACA1Et0GeKrOj091erq6lIvY3h2dqaHDx/q/v37c7vCzRtvvKGTk5OkkzGWIAj09OlTvfjii1pZWUk6OalAnsaPPI0X+Rm/UXn6+eef6+OPP9arr76q556j/Q3S6uqq3n333Zl+RqJTlmE5nJyc6IMPPkg6GQAAYE49ePBg5p/B7RUAAABSj6AXAAAAqUf3Bsw1x3HUbrfVaDSUz+dlWVbSSQIAXCN+BxAXgl7MBd/35bquMpmMTNOUJDWbTUlSoVCQ7/u6c+eOOp1OkskErtWg6wLLZZnKAL8DmDW6NyBxruvKdV1ZliXP81QsFiVJ7XZb9XpdkmQYhtbX16MKEEi7YdcFlscylQF+B3AdCHqRKN/3Va/XlcvlZBiGLMtSNpuVbduyLEuVSiU6tt1uK5PJJJha4Hpcdl1gOSxTGeB3ANeFoBeJCu/uu1mWpWq12rOtWCzq4ODgOpMGJGbc6wLptUxlgN8BXBcWp1hwi7A4xYMHDyaep3dlZUWdTkeGYchxHElSLpebRfKAhdF9XWA5LVMZ4HdguUwTK0yKgWxIXLVa1fr6uqTzR1fhyNx2u63j4+PocVez2ZRhGNEAB9d15fu+2u22CoWCpPPHZPfu3VOj0UjmZICYjLouKPvpd1kZMAwjVXUgvwO4DgS9SFQ2m1WpVOrpo9U9WCOfz0f/7/u+wgcTnudpfX1dpmkqm81GlZ3rulHFCSyqUdcFZT/9LisDpmmmqg7kdwDXhT69SIxt28pkMn2DEo6Pj2VZlkzTVKfTif5198TxPE+ZTEaO4/T8fb1eVzabvbZzAOI26rqg7KffqDIgpacO5HcA1ypYYCcnJ4Gk4OTkJOmkJOazzz4L3n///eCzzz5LOilDvf766wO3SwoajUbfdsMwglqtNtZ7ZzKZoF6vD30NLJpxrwvKfnpNUjcuejngdwChYbFCnBjItuAWdSCb67rKZrO6WPyazaY2Nzf7tg/i+77W1tZ6jl1ZWRnrb4Hr4vt+NAp9d3f30mPHvS4o+4tlFmUgfN9FLgf8DiyHcVfUu46BbHRvQGIGrS60v7/fMyfjZTzP63mPZrPZM7gBmAeu6+rJkydjHz/OdUHZXyyzKANSOsoBvwPp1r2iXqlU6umfnQSCXiTCsiy12+2ebeGUNOFghFEMw+iZtufw8DDq1+V5XjwJBa4ol8tpY2NjrGPHvS4o+4tlFmVAWvxywO9A+s3binrM3oDENBoN2bat27dvSzq/IGq12th/b5qmtra2VC6XZZqmdnZ2tL+/r2q1OnaFCcybca4Lyn66jVs3pqEc8DuQbpZl9XRnSHpFPfr0LrhF7dO77Hzf19HRkWq1WnQXPEq5XI5aNHzfH9k38DpMcx7ValW+78swDLVaLe3t7UXnlc/ntbOzI9M0+ybfH/QYNJvNjv25055DHPkenvM8fGdxm7QMjPMdj3rPacrdVc6BMoBhZlGXl8tlSVKr1ZKkvq4ei1j+pfNp6LLZ7NAFRlicAkihZrPZs7jAOMJKsHseymKxOHa/t3Fc7Bs3yrTnUSgUeirQN998M2rZaTab0ePNbrlcrq/1x3GcK/fZG3UOo/K9XC4P7Kt5+/bthQ1uJikH05SBUd/xqPec5jOvcg7LWAaW2azL/6jyZNu2SqVSdHwYKIbB6KKWf8dxLg14r83M54eYIaYsW+wpy5ZdrVYLMpnMWMcahhF0Op2ebXFfvoVCYaq/m+Q8LMu6dFupVOrbX6lU+rZ1Op2gUqnElgfDziGufK9UKgPPbR5NUw4mKQPjfsej3nOSzxwHZQBBMPvyf1l56nQ6gWVZPfsbjUYgKWi1WlN/5jhmWf7r9Xo0hVyj0eg7l9B1xAoMZAPmnOd5UXeAi8KWTtd15ThONC2SdN6Kurm5eV3JHIthGMpms/J9X1J/q8rFVgDXdbW1tdX3PkdHR9re3p5pWsfJ93G4rqt6va56vT6whXPZjPsdzwPKwLlFqV/m3Tjl6fj4uGcAXneXn+sWR/n3PE/5fF75fF5ra2va3Nyc6Ili3OjeAMy5YSOQDcOQ7/sLtRTnwcGBNjc3tba2pt3dXW1sbP
R00eiuDD3Pk+d5fXM6uq47dJ7HOI3K93FdHMix7Mb5jucFZYClfuM0qjwZhqFOp9OzLwwukwgU4yj/4Yp684KgF0ure233y2xubs7lKOD19XW12+0oaCiXy3O/FKdhGLJtW/V6XeVyWZZlaXt7e2BLQqlUGthn2fd9maaZSMuH9Kt8x9UN+47n3TKVgUWqXxbVZeUpnLN4UB2ZlEUu/wS9SMTKysq1fE5wyeQkcfzYOo6jw8PDkcft7e3FPk1LWOmELUiHh4c9AyCOj4+HTgQ+KOAPByh0i3vggW3bymazqtVq0WOvzc3NaJRyaNg8jpNMQ+Q4jmzb7nvvq4qzsk/6OkiqHEjDv+O4zaIcxP2Dn3Q5uCyPpqlfRr1nKOnzTrL8dxtWnmzb1s7OzpUaXRah/F8ngl4k4rJgdJHkcrmZj0Yd9lgrbPEM/7/ZbPY8Qr34utuggD/u2SAuCvuHhWkyTVONRkObm5tyHKcnHyuVSt9k/s1mc6K+n6ZpXulGY5x8v6qkr4MkykH3Z4+7YMNVXKUcXEcZkJIvB6PyaNL6ZZz3lJI/7+su/5OUJ8dxtLGxceWnjItQ/q8TQS+W1qJ0bwjnMx00lU74ozNqKc556E/oed7AR3TDWlsuDpJpt9tqNptRH7ew5SKclP7izUcmk5lokvuLxsl3TG/QdzwLVykHy1IGRuXRNPXLVa+/NBq3PIV1XPi7E04nNk2gSfnvRdCLhec4jtrtthqNhvL5/NgXY9J9CYc9IvI8T47j9MxxuLe3J9d1o0rQcZy5WZZ23POwLEulUqlvNHCj0ej7LoZVshdbmqrVaixzoQ47h1H5jnOTlOXufZf9iI96hBr3I1bKwHCLsNTvtL8DcYizLm82m2o2m8rlclHeDipzlP8pzXxStBlint70zNPb6XSCWq02dP6+YRqNRlCr1aL3MAxjqjRep1arFZRKpSCTyQSSgt3d3egcguB8Pk/TNPv+rlQqBbVaLajVasHu7m7f/kKhEB3TaDSCXC43cP7TYSadn3Ka8+h0OsHu7m5QKpWieUsvzgEZBEFgmmbQaDSGfnatVgtyuVz0ueEckJMadQ5BMDrf4zTtdRCnScrBtGU5CIZ/x6Pec5zvbBLzVgaCYD7KwUVXrV/GMQ+/A9dR/oeVpzDtkvr+jfuZk5qn8n8d8/QS9C64NAS99Xo9qNVqQafTCer1+kSVzsXjRwVLGG7axSkQj6tcB3GiHCRrXsrBdZuX34Flye95xOIUSD3f91Wv15XL5WQYhizLUjablW3bY/29ZVk9j8bb7XbssyQsi3HzHPG76nUQJ8pBcuapHFynefodSHteLzuCXiTKdd2+lV0sy+pZ+WdcxWJRBwcHcSVt6SzqaNw0iPM6uCrKQXLmqRxcp3n6HaD8p9tKECzu3FGnp6daXV3VycmJbt26lXRyEnF2dqaHDx/q/v37unHjRtLJGejBgwf64IMPJvqblZUVdTqdsSfkDpf2nPX0YcB1mvQ6QDotazngd2C5TBMrTIrZG5C4arUaLWfZbrejUbftdluGYUQVWb1el23bcl1XrVZLxWJRpmnKdd3okViz2ZRhGNytY+GMug4kjbwWsPiWtRzwO4BrMfNewzPEQLbFH8hmWVbfgINCoRCNVq1UKtHo/kKhEORyuejvwlG+hmFE/xa8SGNJjboOgmD0tYDFt6zlgN8BBMH1DGSjpReJsW1bmUymb8DB8fFxdJe/vr4etW54nhctZFCv16PjO53O9SQYmIFxrgNpvGshbA0LJ7Jf1Ankl9GylgN+B3CtZh5WzxAtvYvd0itp4LQyhmEMbLEwDGPgnK7AIpv0Ogj3XbwWWq1Wz3RLlmXFmk7M1rKWA34HEKKlF6kVjtS9eHffbDbl+37fQIRwZZpBAxoWqVUD6DbpdSANvxbCPo0hwzDmZglqXG5ZywG/A7huTFmGxAwaZLC/vz9weeCLlXZYWXqeF83vWCgUVCqVZpdgYAYmuQ6k4ddCq9XS7du3o+3r6+vyfT/exGJmlrUc8DuA60TQi0RYltW31nd4p969xnc2m5V03ncrHNnr+35UiQ9r1QAWwTjXQbht1LUwyMX3xnxa1nLA7wCuG90bkJhGoyHbtqNWCcMwVKvVov2maSqbzcpxHB0cHMi27b55GBetVQO4aNR1II13LWxsbPSU/fAxLxbDspYDfgdwnQh6kRjTNC99DHVxRO+wx3wXzWurBjDIqOtAGu9asCyrZwlVz/Po17hAlrUc8DuA65Ro0JvP57WzsyPTNPs6ps/znelE/u7vpP/5P6X/9/+kv/f3pH/8j6Xnn086VamxaK0awKyYpqmdnR05jqN2u629vb2kk4QELGM54HcA40o06G02m9Fjim65XK7vsc5C+uEPpd//fen//J9fbfvyl6U//VPpG99ILl0psmitGsAssfwqpOUrB/wOYFyJDmQrFosKgqDnX6VSSU/Am8v1BryS9H//7/n2H/4wmXSlTHerRrVaXYpWDQDAr/A7gHEl2tJ78W7UdV1tbW0llJoY/d3fnbfwBkH/viCQVlakf/WvpK9/na4OMVi2Vg0AQC9+BzCORIPe7j43nueNfCTx7NkzPXv2LHp9enoqSTo7O9PZ2dnsEjqpv/xL6ckT6QtfGH7Mz38u/Y//If2jf3SljwrPe67O/4JgUPAPAABwjeZm9oZSqTRyVOb+/r7efvvtvu0ffvihXnjhhVklbTr/6T+NPub0VHr4MJaP616DfN48ffo06SQAAIAlNxdBb7PZHOu4vb09/cEf/EH0+vT0VC+//LJee+013bp1a1bJm9xf/qX0T//p6OP+23+LpaW3Xq8rm83qxo0bV3qvWfn+97+fdBIAAMCSm4ugt1KpaGNjY+RxN2/e1M2bN/u237hxY74Cvt/6Len27fNBa4Me7a+snM/i8Fu/FVuf3rnLgy4rKytJJwEAACy5uQh6XdfV5uZm0smIz/PPn09LlsudB7jdgW8YAP7JnyzNILbV1VU9ePAg6WSMJQgCPX36VC+++CLBekzI0/iRp/EiP+M3Kk8///xzffzxx3r11Vf13HOJTiSFObG6ujrzz5iLoNfzvPRNJP2Nb0iOM3ie3j/5k6Wap/fdd99NOgljOzs708OHD3X//v25bTlfNORp/MjTeJGf8RuVp6enp1pdXdWjR4/mq3siUm0ugl7TNLW+vp50MuL3jW+cT0vGimwAAACJmougt9VqJZ2E2Xn+eemf/JOkUwEAALDU6EgDAACA1CPoBQAAQOoR9AIAACD1CHoBAACQegS9AAAASD2CXgAAAKQeQS8AAABSj6AXAAAAqUfQCwAAgNQj6AUAAEDqEfQCAAAg9Qh6AQAAkHoEvQAAAEg9gl4AAACkHkEvAAAAUo+gFwAAAKlH0AsAAIDUI+gFAABA6hH0AgAAIPUIegEAAJB6BL0AAABIPYJeAAAApB5BLwAAAFKPoBcAAACpR9ALAACA1CPoBQAAQOoR9AIAACD1CHoBAACQegS9AAAASD2CXgAAAKQeQS8AAABSj6AXAAAAqUfQCwAAgNQj6
AUAAEDqEfQCAAAg9Qh6AQAAkHoEvQAAAEg9gl4AAACkHkEvAAAAUo+gFwAAAKlH0AsAAIDUI+gFAABA6hH0AgAAIPUIegEAAJB6BL0AAABIPYJeAAAApB5BLwAAAFLv15JOgCTZtq2NjQ1J0vr6unK5XMIpAgAAQJokGvT6vq979+7po48+kmEYajab2tzcVBAESSYLAAAAKZNo9wbbtrWzsyPDMCRJmUxG9Xo9ySQBAAAghRJt6a1Wq2q1WvI8T57nybIsWZY19Phnz57p2bNn0evT01NJ0tnZmc7Ozmae3nkUnveynn/cyM/4kafxI0/jRX7Gb1SektdIwkqQUF8Cz/O0sbGhWq0m0zRlGIZKpZLy+fzQwPe73/2u3n777b7tP/jBD/TCCy/MOskAACAGn376qb71rW/p5OREt27dSjo5WBKJBb2u6yqbzaper0dBru/7unPnjjqdzsC/GdTS+/LLL+vnP//50l40Z2dnqtfrymazunHjRtLJWXjkZ/zI0/iRp/EiP+M3Kk9PT0/1pS99iaAX1yrx2Ru2trai/zcMQ77vy3Xdga29N2/e1M2bN/u237hxY+krKvIgXuRn/MjT+JGn8SI/4zcsT8lnJCGxgWymaQ7cbhiGPM+75tQAAAAgzRINek3T7Atwfd/vaf0FAAAArirRKctKpZIODw+j147jyLIsZTKZBFMFAACAtEm0T28ul1O73Va5XJYkPXnyhHl6AQAAELvEB7IVCoWkkwAAAICUS7R7AwAAAHAdJg56T09P9eMf/zhaDe2iH//4x1dOFAAAABCniYLenZ0dra2tybIsra2t6Tvf+U7P/pOTE2Wz2VgTCAAAAFzV2EHvW2+9pUajoQ8//FCdTkc/+tGPdHx83Bf4JrTAGwAAADDU2MsQv/rqq6pUKvrqV7/as317e1sbGxva39/XycmJ1tfX9Xd/93czSexFp6enWl1dXeplDM/OzvTw4UPdv39/rle4eeONN3RycpJ0MkYKgkBPnz7Viy++qJWVlaSTkwrkafzI03iRn/Eblaeff/65Pv74Y7366qt67jmGF2E2VldX9e6770avx5694cmTJwMXjTg6OtL29rb+/b//98rlcvGkEqlzcnKiDz74IOlkAACAJfHgwYOe12PfXlmWpaOjo4H7jo6O9KMf/UgHBwdXSx0AAAAwA2MHvQcHB/rwww/1ta99TZ988knf/qOjI/2v//W/4kwbAAAAEIuxuzesrq7q6OhIjx8/1iuvvDLwmFqtpsePH8eVNiDiOI7a7bYajYby+bwsy0o6SQAAYIFMvCLbnTt3rrQfGMb3fbmuq0wmI9M0o+3NZlPS+ep9vu/rzp076nQ6SSUTwIIZVrdguVAOwJBJzAXXdeW6rizLkud5KhaL0b52u616vS5JMgxD6+vrUSAMAJe5rG7B8qAcQCLoxRzwfV/1el25XE6GYciyLGWzWdm2Lel8EGWlUomOb7fbymQySSUXwIIYVbdgOVAOECLoReLCO/BulmWpWq32HVssFpklBMBYJqlbkF6UA4QIepG4XC6nRqPRs80wDPm+L9/3o22O4yibzTIfNICxjFu3IN0oBwhNPJANmIVqtar19XVJ590XwtkZ2u22DMOQ67rRY6lmsynDMGSaplzXle/7arfbKhQKks4fZd27d6+vkgOwfMapW6hD0o9yAImgF3Mgm82qVCr19NMNBxmYpinP85TP56N9vu8rCAJ5nqf19XWZpqlsNhtVVK7rRpUbgOU1Tt1CHZJ+lAOEpu7e8NOf/lRvvfWWvva1r0Xb/viP/1g//elP40gXloRt28pkMn0D046Pj6M7cdM01el0on9BEEiSPM9TJpOR4zg9f1+v15XNZq/vJADMnXHqFuqQ9KMcoNtUQe/BwYHu3bunjY0NHR8fR9vv3LnDaEhMpFwua2dnp2/7OFPKhBXW4eFhz7HHx8fM7gAkzHEcVatVFYvFvkFE3ceExw07Zlrj1C3UIelHOUCPYAq/+Zu/GTx+/DgIgiBYX1/v2Xfx9SydnJwEkoKTk5Nr+8x589lnnwXvv/9+8NlnnyWdlEu9/vrrfdvq9XowqAg2Go2B2wfpdDp9x05ZrIFU6XQ6QalUCkql0rV/dqPRCGq1WpQOwzD6jmm1WkGhUIheW5YV2+dPUrdQhyyWSco15SC9arVaUKlUgkKhENTr9aHHXYw9pmrpffLkiW7fvt23/fHjx9GjZ2Acg1bF2d/f75mX9zKe5/Wt3ha+jrvlCFgkruvqyZMniXz2OAvKhINTQ+FgoriMW7dQhyyWScs15SB9uldpLZVKPWN+Rpkq6M3n88rn8zo9PY22nZ6eqlgsRh3AgVEsy1K73e7Z5jiOJI1djgzD6PnhPDw8jB5HeZ4XT0KBBZTL5bSxsTHx300yhdOwY8dZUKbVavU0nqyvr8c2fdQkdQt1yGKZpFxTDtLpKqu0TjV7Q6VSUT6fjwrI3bt31Ww2VSgU9M4770zzllhSjUZDtm1HP36GYahWq43996ZpamtrS+VyWaZpamdnR/v7+6pWq9yAAROybVulUmns46vVqnK53MDWtNAkC8pcDFCuYty6hTok3SgH6WNZVtQPW5psldappyyr1Wp6/PhxFF1nMhnduXNn2rfDkjJNc6If2UEuPqaaJGhOku/7Ojo6Uq1Wi+5aRymXy9HNpu/72t3dHXu/67qqVCrKZrMyTVP1el13795dyMU+psm7arUq3/dlGIZarZb29vZ6WnbGydvQkydPrlxuR53DqPTELRx0Nond3V0Vi8Wh3ZEuW1BmY2Ojp2W33W5fGjxPapK6ZV7qkGnrhNCgchnub7VaknrPNfy8cL/neTo4OOi5LuI+h+su18tSDkb9zaj9o+rHuM8hrnIw6SqtUwW9zz33nLa3t7Wzs6NvfvOb07wFsNSazaaOj4+jydDHEf54dc8h2R1wjNrv+75c15XjODJNU7ZtzyTgvdg3Lm7T5l2hUOipZN98883oR21U3uXz+Z75O6vV6sStopOcwzjf9aB+jbdv357qx8PzPDUajalatPL5vMrlct/nDltQJvxhtSyrZ7Yfz/N6Wm/SYJJrYZpyPapcXiyjxWJR2Ww2CkJs25Zt21Eai8Wi8vn82IHWpOdw3eV6HkxaH05TDkb9zTjfy2X146SuqxxMtUrrNKPmGo1GUCwWg7W1teC5554Ltre3g48++miat7oSZm9Y7NkbcD4CNZPJjHWsYRhBp9Pp2dZ9CY/aX6vV+vbPQveI/FmaJO8GzQzQve2yvGu1WoGknv3hSO+r5uewcxj1XY6rUqmMNcp9d3c3aLVaE79/6OI5tFqtwDCM6F932k3TjM6tVqtFo7DD2R7SZJprYdxyPapcdjqdwLKsnv3hrAXhd21ZVk/5KJVKA2faiOscrrtcz4Np68NJ6rdx/2bY/lH147RmWQ7q9Xo0a0Oj0Rhaf8Uye0Mmk9H3vvc9tdttPXr0SK+88ooKhYKef/55/d7v/d40bwngEp7nRS1kF7muO3L/OMJW4Gq1Gm3zfV+bm5vTJntuGIahbDYb
PU7vbn0ZJ2/D9+h+P0k985THJY7vMjy2Xq+rXq9Hg3cuO/Zia9Qk5cE0zZ6BJMMWlJHOH6OH55bL5ZTL5VQoFBaym02SximXx8fHPYOwwu84vA7q9XpPy9mjR49m1tqeRLnGeC6rH+MWRzkIV2nN5/NaW1vT5ubm2Om98jLE4Uonr732mkqlkiqViv7dv/t3V31bAF2GjR42DEO+74/cHzo6OtL6+rra7bZarVb06DPty3AeHBxoc3NTa2tr2t3d1cbGRvQobVTehQMkBlXUsxjVPe53OcrFwR6Xfd7F73jS8pDNZuW6LhP5X6PuAHZQubQsS51Op2d7GFQMChAcx5Hv+zPrx3rd5Rrju6x+jFsc5SC8qZ7GlYLeH/7whzo8PJTjODIMI5ozDVgU4w7c2dzcnMsRvGEAO2zAQbhfUhSQhD941WpV+XxetVot+pEsl8upXIbTMAzZtq16va5yuSzLsrS9vX3pQI0w70zTlGVZcl03ao1MYt7O7u8yTr7v9wVBk5aH9fX1aKAUrsc05TKcn7a73IcDjnzf75mV6brMqlxjfNPUj3G7rnIwVdC7vb2t9957T6urq9re3tbx8bH+4T/8h3GnDSm3srJyLZ8TXLJgShx3s47j6PDwcORxe3t7sbeEjaokuvdfDGy2t7dVLBbl+37PMpzdN67Hx8eXTvw96KYhHJTQbdhgg+vKO9u2lc1mowA/n89rc3Pz0kCtO+/q9bps21a73Y5aQKXBLWbS+XnZth1rIDirHwTP8/p+3CYtD6ZpjvU9Tmuc/Ey6PrnqtTCNScqlbdva2dkZOD9t90C4tbU1PX78eGDAswjlOslykEQZiMMk9eMsyoA0u/rtoqmC3vX1dX344Ye6d+9e3OnBErksGF0kYb/EWRoWXIWtdKP2S+eVVXc6wx81z/OUyWTk+76azWbPo8OLry8adNNw2RRWF11H3oV9yMLzME1TjUZDm5ubchxnaDB9sQW0O/gLH8NtbW0N/FvTNKcO0sf5Lq/DJOUhDLpmZZz8TLo+ueq1MK1xyqXjONrY2OgJeH3f1/7+fs/UVJZlRbO8DLouF6FcJ1kOkioDVzGqfrxYDq5SBsK/H+S66repgt7vfe97cacDSMSidG8wTVOGYQwcYBBWVpftDx9dtlqtvsEs3QO6LluGc1H70Q1qyZR+9d2Pk7fNZrOnog+DgmGP/zKZzNR9I8dJT5yGtdJOUh58359q9bdxXSU/02ycchl2eQjrr3AaKd/3VS6XVSwWe6aqkpSKco3xjKofL7rqtZh0ORgr6P0X/+JfKJ/P66tf/aqk80eNl9nf3796yoAJOY6jdrutRqOhfD4/1gWU9B34sEc6nufJcZyekdV7e3tyXTf68XIcpycQv2y/YRja3d3tqWTC1bTCCs9YsGU4x807y7JUKpX6Bvw0Go3o+x+Vt/l8XpVKJSpTlUollrIz7BxGpSdOpmkO/H4nKQ/DfjgxuUnqhFHlstlsqtlsKpfLRd9ZWJZM0+yrE8Lv+KrBxzyU60U3STkY9TeX7R+nfpzWPJaDlWCMZwFbW1v6zne+o2984xuSpNdee234G66s6Ec/+lF8KbzE6empVldXdXJyolu3bl3LZ86bs7MzPXz4UPfv39eNGzeSTs5QDx480AcffHDpMeFjtUwmM/FjjmazKc/zlMvl5Pu+7ty5M/XozusQVlyHh4dqNpva3d3tWR2tWq2qVCr19ZsKl8iUzqcXGrT60rD9vu/3TD81aPWmYrGojY2NqMvE/v5+z+j9ccz6cd40eRc+yr19+3Y0Srh7Mnbp8rxzXTdaYKHVaqlYLF7pUdyocxiVnrh1L1jQbdzykM/nr7ySV9yuUp/EZZJrYZpyfVm5DOvBQSPiw5/9i3VCOKPLtN/jvJVrKflyMGl9OE05GPU3o/aPUz9OYp7KQV/sMdFswHOGxSnSszhFvV6PFk+o1+sTT+h98W9M0wwajcZUacXVXNfiFIhPqVSKJnqfRi6XizE1V3fV+iQuXAvJmodyQBlIViyLU5yeng7c/sknn+iTTz6Z5i2xxHzfV71ejx61W5albDbbs0TpKJZl9dxNt9tt5gxNyCTfG+bD7u7u1K3zYb/QeRFHfRIXroXkzEs5oAzMl6mC3rW1tYHbw8crwCRc1+2bX9KyrJ7HbpMoFos6ODiII2mYQlKPknE1Ozs7E69w5fu+njx5MlcDkeKuT66CayE581IOKAPzZaqgNxjSDXhra2smy3Ii3XK5nBqNRs+2sF/RJCv1SOcd4udtDkRgEXT3/xtX2L9wnsRZn2BxUQ4wyERTlv3mb/6mVlZWtLKyoldffbVvfzjfJzCparUazfPZbrejlqPu1cbCVqhwQnbXdXsGb7iuGz3GCgd3cJcNjG/Sm8VBI8jnQRz1CRbfqHJAGVg+EwW9lUpFQRDotdde0zvvvNO33zRNVmbDxLLZrEqlUs8NU/ccqtJ55RUuixhWULVaTdlsVs1mU5J6VoryfT/xyeoBXL846hMCnsU3qhxQBpbTREFvuAJbLpfTN7/5zZkkCMvFtm1lMpm+JwTHx8c9/QTX19d7VhALK6/uaZbmeYoyALMXZ30StgK2222ZpjlX/ZZxuXHKAWVgSU0zBYTrusF7773Xt/2tt94K/uqv/mqat5wKU5Yt/pRlkgZOLWYYRlCr1Qb+jWEYQafTiTN5AFIgrvqk1Wr1TDVlWVas6cRsTVoOKAPpFcuUZW+99dbASYu3traYngNjC0fWXrwbbzab8n1/YP/CcJDNoPLnOI4cx1G1Wu0btQsg3eKsT8LxASHDMKhTFsSk5YAysFymCnobjYa2trb6tluWRaHARAb1m9rf3x86Z6jruj2PmMLy5nleNCdjoVCYuxHlAGYvrvqk1Wrp9u3b0fb19XVG/C+QScoBZWC5TBX0mqapx48f921vt9u6c+fOlROF5WBZVt/a3GEfqu5lTsNpyKTz/lbhaNzuqWe4KweWW5z1ySAX3xvzaZxyQBlYXhMNZAsVCgV9+9vfluM4+o3f+A1J56uxbW9v94ygB0ZpNBqybTu6ozYMQ7VarecY0zSVzWblOI4ODg5k23ZUiYWPqrgrBxBXfbKxsdFTf4QDmbAYRpUDysDymiro3d3dVavV0p07d6LV2Xzf15tvvqn9/f1YE4h0M01zZFeEi6Nwx10ulbtyYLnEVZ9YltUzPsXzPEbuL5BR5YAysLymCnql80JSLpd7Oo1P2rXBdV1VKhVls1mZpql6va67d++mbzWtX/5SeuMNqdWSNjakd9+VvvjFpFOVKtyVA4iLaZrRssztdlt7e3tJJwnXjDKQTlMHvT/96U91eHioZrOpH/3oR5KkP/7jP5ZlWfoH/+AfjPUevu/LdV05jiPTNGXbdvoC3q98RXr06Fev//qvpZdeku7elX7yk+TSlTLclQOIU+p+izAxykD6TDWQ7eDgQPfu3ZNpmjo+Po6237lzZ+Ipyx4/fqwgCNRqtXoGG6TCxYC326NH5/sRi+678mq1yl05AADoMVVLb7lcVqPR0CuvvNIT5H7zm99MX+A
6rV/+cnjAG3r06Pw4ujrEgrtyAAAwzFRB75MnT6JRkSsrK9H2sNV2EkdHR1pfX1e73Var1bq08/mzZ8/07Nmz6PXp6akk6ezsTGdnZxN97sz9838ufeEL4x33H//j1B8Tnvfcnf8Fk5YLAACAOE0V9ObzeeXzeR0dHUXbTk9PVSwWJ2rpDUdPhgOOqtWq8vl83xQzof39fb399tt92z/88EO98MILk5zC7P3u757/G8fDh1f+uO71wufR06dPk04CAABYYivBlE1w+Xxe7733niRpc3NTzWZThUJBf/ZnfzZ1Ynzf19ramjqdzsBlZge19L788sv6+c9/rlu3bk39uTPxu78r/df/Ovq4f/bPrtzSW6/Xlc1mdePGjanfZ9a+8Y1v6C/+4i+STgYAAFgSDx480AcffBC9nnr2hlqtJs/z9Fd/9VeSppuyzHGcnn6YYaDreV7futmSdPPmTd28ebNv+40bN+Yv4PsP/+F8loZxjosh7XOZB126u8EAAABct6mDXum8W8K0c6H6vq98Pq9WqxW9RzjPairmV/3iF8+nJbtsMNvdu0sziG11dVUPHjxIOhkjBUGgp0+f6sUXXyRQjwl5Gj/yNF7kZ/xG5ennn3+ujz/+WK+++qqee26qiaSAkVZXV3tejxX0Pv/886pUKvr2t78tSXruuefGqhgymYwODg4GzttrGIZ2d3d7AtxqtapcLjewa8NC+slPhk9btmTz9L777rtJJ2EsZ2dnevjwoe7fvz/XLeeLhDyNH3kaL/IzfqPy9PT0VKurq3r06NH8dU9Eao0V9L7zzjva2tqKXo87aOro6Ej5fF4ff/zxwP17e3sql8vR6ydPngwdxLawfvITVmQDAABI2FhB7x/+4R/2vL53795Yb761taW1tbWh+8PW3tT74hel//Jfkk4FAADA0pq6T+8nn3yiSqUiz/MkSV/5ylf05ptv9jymOD4+ZsEAAAAAJG6q3uPvvfeeTNNUrVbT2tqa1tbW9Gd/9mdaW1vT//7f/zs67t69ez1z+QIAAABJmKql17Zt7e7u6p133unZXiwW9e1vf1uPRi2/CwAAAFyjqYLedrut73znO33bS6WS1tfXr5woAAAAIE5TdW/Y3t7W48eP+7Z/8skn9OEFAADA3BmrpXdvb69v21e/+lUVCoWebdVqVdvb2/GkDAAAAIjJWEFvo9Ho27a5udm3fXNzM55UAQAAADEaK+j98MMPZ50OAAAAYGZY8BoAAACpN1HQe3p6qr29Pd29e1fPP/+8nn/+eb366qv6vd/7PZ2ens4qjQAAAMCVjB30/vjHP9Yrr7yiWq2me/fu6Xvf+57eeecd3bt3T//5P/9nra2t6b//9/8+y7QCAAAAUxmrT+/jx4+Vy+VUKpX05ptv9u3/3ve+J9u2ZVmWPM/Tb/zGb8SeUAAAAGBaY7X0vvXWWyoUCgMD3lCpVNK3v/1t7e7uxpY4AAAAIA5jtfS6rjtw2rKLbNvW3bt3r5woAAAAIE5jtfQGQTDWm62srFwpMQAAAMAsjBX0Wpal9957b+Rx1WpV9+7du3KiAAAAgDiN1b3hnXfe0dbWlkzT1O/8zu8MPOaP/uiPVC6X1Wq1Yk0gAAAAcFVjBb2maero6EivvfaaNjc3ZVmW7t69q3a7rVarJcdx5Hmejo6O9Morr8w4yQAAAMBkxgp6pfMuDu12W7Ztq1arqVQqSToPiC3L0vHxsVZXV2eWUAAAAGBaYwe9kmQYhiqVyqzSAgAAAMzERMsQAwAAAIuIoBcAAACpR9ALAACA1CPoBQAAQOoR9AIAACD1CHoBAACQegS9AAAASD2CXgAAAKQeQS8AAABSj6AXAAAAqUfQCwAAgNQj6AUAAEDqEfQCAAAg9Qh6AQAAkHq/lnQCAAz3xhtv6OTkJOlkXEkQBHr69Km+//3va2VlJenkpAJ5Gq9FzM/V1VW9++67SScDWCgEvcAcOzk50QcffJB0MgDMmQcPHiSdBGDh0L0BAAAAqUfQCwAAgNSjewOQYo7jqN1uq9FoKJ/Py7KspJMEAEAiCHqBBef7vlzXVSaTkWma0fZmsylJKhQK8n1fd+7cUafTSSqZAAAkiu4NwAJzXVeu68qyLHmep2KxGO1rt9uq1+uSJMMwtL6+HgXCAAAsG4JeYEH5vq96va5cLifDMGRZlrLZrGzbliRZlqVKpRId3263lclkkkouAACJIugFFlTYytvNsixVq9W+Y4vFog4ODq4raQAAzB2CXmBB5XI5NRqNnm2GYcj3ffm+H21zHEfZbFa5XO6aUwgAwPxgIBuwwKrVqtbX1yWdd18IZ2dot9syDEOu60ZdH5rNpgzDkGmacl1Xvu+r3W6rUChIOu8uce/evb5AGgCANCDoBRZUNptVqVTq6acbDmQzTVOe5ymfz0f7fN9XEATyPE/r6+syTVPZbDYKel3XjQJoAADShu4NwAKybVuZTKZvYNrx8XHU2muapjqdTvQvCAJJkud5ymQychyn5+/r9bqy2ez1nQQAANdorlp6s9lsNMUSgOHK5fLAbgie52lvb+/Svw2D4sPDQ5VKpWj78fFxT8swFts4C5M4jiPpvDuMaZosXgIg1eYm6HUcp28kOoB+4XVysZW32WzK9/2xBqz5vq9ms9kT5Fx8jcU1zsIknuepXq9H09pls1m+/znAKorA7MxF94ZwQA2A8XSvvBba39/vmZf3Mp7n9a3eFr7m5nPxjbMwSTjIMRQOfERyum9WSqUST16AmM1F0Ht0dKTt7e2kkwEsBMuy+m4Sw8fU4aC0UQzD6Al4Dg8Po5Zjz/PiSSim0j3d3LTHjrMwSavV0u3bt6PX6+vrE3024scqisBsJd69IVxCdRzPnj3Ts2fPotenp6eSpLOzM52dnc0kffMuPO9lPf+4zVt+hoPPLmo0GrJtOwpaDMNQrVYb+31N09TW1pbK5bJM09TOzo729/dVrVbHDpwRP9u2e/pZj1KtVpXL5Qa2/IcmWZiEJ27Jsiyr5/fwslUUgyCYm3pqkFF16TynHemVeNDr+75M0xyrhWF/f19vv/123/YPP/xQL7zwwgxStzgYABivecnPp0+fDtxumuZEwdEgF7tCTBI0p43v+zo6OlKtVhv7uy+Xy1Frue/72t3d7dsvnbeoSv35fVG1Wo2mnBvX7u6uisXi0Pe+bGGSjY2Nnno3HMw2S7PI527DBkPbtq2NjQ1J5y3aV1moZdQ5TJLey4y6WXn69KkePnw41Xtfp2Hf86effnrNKQESDnonbVXa29vTH/zBH0SvT09P9fLLL+u1117TrVu3ZpHEuXd2dhZNNXXjxo2kk7Pw5i0/v//97yedhNRrNps6Pj6eaGxBGNB2z3HcHXxebLEtFouXzk7jeZ4ajcZUrez5fF7lcrkvuBq2MInv+9F227Z70jDJoKmL/cJHmUU+dxs0GDpccOWjjz6SYRhqNpva3Nwc+gTlqucwKr3lcllPnjzp+7vbt2/3fH/jrKL44osv6v79+1Odx3UYVZeGT2qBaxUkpNFoBI1GI3rd6XSCSZNzcn
ISSApOTk7iTt7C+Oyzz4L3338/+Oyzz5JOSirMW36+/vrrSSdhadRqtSCTyYx1rGEYQafT6dkW1l+dTiewLKtnf6PRCCQFrVZr4Pvt7u4O3TeOi+lutVqBYRjRv+661TTNKG21Wi2o1WpBpVIJarXaRJ9ZKBSmSmtc+dyt0+kElUqlb1+hUAhKpVLPtnq9PlmCBxh2DuOm9zL1ej1KY6PRGFou5r1uGFWX8vuNJCTW0ttut9VsNqM78/ARYNjH8CqPnwAkJ+1LHHueF7WWXuS6rra2tnR8fBwtAiL9araNYd24XNft664yST6apqlms9nzeRenKAuFda2kua5nR+Vzd6t0OBj6YveQarWqVqslz/OiluxZTQE2SXove49BqygCiEdiQe/FyqfZbKparU7d/wlA8pZhieNhs1sYhhEFPRcDzvDmflB3gDDPBm0bNx+z2axc1x066GkRjcrn0LCAMvz7cDo+0zRVLBZnNvftuOm9zGU3KwCuLvGBbNJ5/6XDw0NJ533hmCQdWExha1q5XJ54ieNxB3Ftbm7O5QwT6+vrQ/uqhnMoD2oFDAfzdps0H9fX13tacNPsYj4PGwwdBqGGYUR5WCqVBi7UcZ3pBZCcuQh6c7ncXD9mA+bJysrKtXzOsMeqjuPItu2BQdZVljged2GNy3TfQF9mb28v9lbRYYGNbdva2dkZGqh7ntcXDE+aj6ZpjnXe0xp0QxIO0uo2avBVHLrzeZzB0FtbW9H/h62ug1qHLyvXcaUXQLLmIugFML6k+/iZpnlpwJjkEsfXcQM9bMaCQS22juNoY2NjqpbpSfKx3W7PtPvIoBuSy6ZKi8OofG42mz0B7bh/bxjGwK4Io8r1KJOUCwDJIOgFMJFMJnPpnL6jljgeFvwuSvcG0zSjwOliMNN9bmE/3u5BaIPmwh3WSjtJPvq+H81Dmxaj8tl13ZGDoU3T7BlQKJ3n1aBgeVS5vmp6ASSPoBdYco7jqN1uq9FoxDLIZ9oljmfZajiOYY+hPc+T4zg9g2z39vbkum4U0DqO0xOIN5tNNZtN5XK56JwvHhMKA7OLJsnHQV0k5lVc+TzOYOhSqdSTb47jyLKsK3dtGXYOo8oFgGQ9l3QCAFyN7/tyHOfSgHKYZrMp6bw1slQqjex3O47uJY4dx9HOzo6kyRejuS6e56lcLqtSqajZbMq2bTmOE+13XbcvIN/d3Y3y3XEcPXr0KDomnFYsXAUs/Gfb9sDA1DCMgV0TJsnHR48eaXt7+6pZMVNx53M3x3G0v78v6bwPddj6m8vltLGxoXK5rHK5rEePHl1ptcVR5zBuegEkYyVIuoPgFZyenmp1dVUnJydLvSLbw4cPdf/+/blYQWzRzVt+PnjwQB988MHQ/eFcrpZl6fj4WLVabaIfWdd1e/5mY2NDtVotVVNfLYJwloZpW9nz+fy1LyM96z69uNyouiFpo+pSfr+RBFp6gQXl+77q9bpyuVy0rGw2m+1ZWnYUy7J6Apd2u03Am4Dd3d2pA8hyuTx2f+g4TVLOAGAeEPQCC8p13egxbsiyLFWr1aner1gs6uDgII6kYQo7Ozs9j8rH4fu+njx5kshAKWYkALBoCHqBBZXL5fqWow3nIR13BaiQ4zjXMscqhgvzfpK+2dVqtW/5YgDAYMzeACywarUaDYJqt9tRi1+73Y4GTYWth/V6PRrk02q1VCwWZZqmXNeNukc0m00ZhkErXkImvelg2XYAGB9BL7CgstmsSqVSTx/csG9nGLRWq1Vtb2/LMIwo6K3Vaspms9HMDd0zNvi+n/jiFwAAzAJBL7CAbNtWJpPpG3R2fHzc079zfX09avH1PC8Kirunbep0OrNPMAAACSPoBRZQuVzu688rnQe2e3t70evux+XhlGYAACwjgl5gwYQzNlxs5W02m/J9f2C/0HBw1KDFEcI+v+ESuSyZCgBII2ZvABbQoIFm+/v7Q+d6dV23J5gNA2fP86K5fsNV2QAASCOCXmDBWJaldrvdsy1sre1enjachkw678MbzvLQPaVZOHNDyDCMvrl/AQBIA7o3AAuo0WjItm3dvn1b0nmwerG/rmmaymazchxHBwcHsm07Co7DLhCtVit6D+l84Nukc/wCALAICHqBBWSa5siuCBdndxh3mduLrcgAAKQB3Rswv/72b6V/+S+lr33t/L9/+7dJpyh1NjY2el6Hg9kAAEgbgl7Mp9/+bemFF6R/+2+lDz88/+8LL5xvR2wsy9KjR4+i157nMXsDACCV6N6A+fPbvy39+Z8P3vfnf36+//33rzFB6WWapnZ2duQ4jtrtds8cvwAApAlBL+bL3/7t8IA39Od/fn7cF75wPWlKuUHz+gIAkDZ0b8B8+cM/jPc4AAAAEfRi3nz8cbzHAQAAiKAX8+bVV+M9DgAAQAS9mDd/9EfxHgcAACAGsmHefOEL0te/fvlgtq9/fWkGsa2ururBgwdJJ+NKgiDQ06dP9eKLL2plZSXp5KQCeRqvRczP1dXVpJMALByCXsyf998fPm3Z17++VNOVvfvuu0kn4crOzs708OFD3b9/Xzdu3Eg6OalAnsaL/ASWA0Ev5tP7759PS/aHf3g+aO3VV8+7NCxJCy8AAIgXQS/m1xe+IP2bf5N0KgAAQAowkA0AAACpR9ALAACA1CPoBQAAQOoR9AIAACD1CHoBAACQeszeAGBsb7zxhk5OTib6m3Di/+9///sLM/H/vCNP4zWP+bm6upqKebqBeULQC2BsJycn+uCDD5JOBpB6i74SIzCP6N4AAACA1CPoBQAAQOrRvQHAtXEcR+12W41GQ/l8XpZlJZ0kAMCSIOgFECvf9+W6rjKZjEzTjLY3m01JUqFQkO/7unPnjjqdTlLJBAAsGbo3AIiN67pyXVeWZcnzPBWLxWhfu91WvV6XJBmGofX19SgQBgBg1gh6AcTC933V63XlcjkZhiHLspTNZmXbtiTJsixVKpXo+Ha7rUwmk1RyAQBLhqAXQCzCVt5ulmWpWq32HVssFnVwcHBdSQMAgKAXQDxyuZwajUbPNsMw5Pu+fN+PtjmOo2w2q1wud80pBAAsMwayAYhNtVrV+vq6pPPuC+HsDO12W4ZhyHXdqOtDs9mUYRgyTVOu68r3fbXbbRUKBUnn3SXu3bvXF0gDADANgl4AschmsyqVSj39dMOBbKZpyvM85fP5aJ/v+wqCQJ7naX19XaZpKpvNRkGv67pRAA0AwFXRvQHAldm2rUwm0zcw7fj4OGrtNU1TnU4n+hcEgSTJ8zxlMhk5jtPz9/V6Xdls9vpOAgCQaom29Pq+r6OjI0lSq9WS53k6ODiQYRhJJgvAhMrl8sBuCJ7naW9v79K/DYPiw8NDlUqlaPvx8XFPyzAAAFeRaNBr27Zs244msC8Wi8rn89FcngDmXzhjw8VW3mazKd/3xxqw5vu+ms1mzwptF18Di44VCYFkJdq9wfM8OY4Tvd7Y2NDx8XGCKQIwje6V10L7+/s98/JexvO8vtXbwtcXp0EDFlH3ioSlUomnGEACEm3pvdii++jRo0vvfJ89e
6Znz55Fr09PTyVJZ2dnOjs7m00i51x43st6/nEjPy8X9sPtZlmW2u12z7bwZjYclDaKYRg93ZoODw+jlmPP86ZMLTA/whUJw8VbwhUJhy3QEgTBQtdDo+rSRT43LK6VYNCvWAIcx1GlUlGtVhvap/e73/2u3n777b7tP/jBD/TCCy/MOIUA/vRP/1Q//vGP+7Z7nqdKpaLbt29LOg9ixw14Q8ViURsbGzJNU6Zpan9/v2c2ByBN1tbW1Ol0hu7/6le/qt///d+/xhRdr08//VTf+ta3dHJyolu3biWdHCyJxIPecDCb7/sjfygHtfS+/PLL+vnPf760F83Z2Vk0yv3GjRtJJ2fhkZ+X+8Y3vqG/+Iu/SDoZcyWsw2q12tjjEcrlcnRz7/u+dnd3J3rPaT5z1srlsqTzQcmSxuraMuo8wveUpCdPnvQMdHRdV5VKRdlsVqZpql6v6+7du1da9GSc9Fz2vY2rWCyOXKDl9ddf1w9/+MOp3n8ejKpLT09P9aUvfYmgF9cq8Xl6uwPdarWqtbU1PX78eGBr782bN3Xz5s2+7Tdu3Fj6AIU8iBf5OdjKykrSSZgrzWZTx8fH0cIa4wgDue75iIvFYhQkjnrPaT5zGhf7WV/Gtu2egDQM6i4LyEedRz6f72npr1arPZ/j+75c15XjODJNU7ZtXyngHZWeUd9buVzWkydP+v7u9u3bPcHxuCsSrqyspKIOGlaXpuHcsICChHQ6nWB3dzfodDrRtlarFUgKarXaWO9xcnISSApOTk5mlMr599lnnwXvv/9+8NlnnyWdlFQgPy/3+uuvJ52EuVSr1YJMJjPWsYZh9NR7QRAEg6riUe85yWdOo1AojHVcp9MJLMvqOadGoxFIClqt1si/H3Qe4W9B93t2Op2ebbVarS8f4zAsX8f93i5Tr9eDer0eBMF5Hl2WP4t+rY2qS/n9RhISm73B8zyVy+WeO2rf9yWJeXoBpJLneVFXrovimqUibP2sVqvRNt/3tbm5Gcv7D3J8fNwz4DBsIQ7r9EmF79WdT+H/JzHDTxzfW7giYT6f19ramjY3N8duSQcQj8S6N2QyGe3u7vZc9OGIbeYuBJBGw2aiMAxj6gDx4vtf95LOhmH0DcgKA8Fpg7ruoPlioNmdh0dHR1pfX1e73Var1erpYhGnOL63cEVCAMlJtE/v3t5ez0AF3/f10UcfJZgiAIuoWCyOddzm5uZczgYRBm5X5XmeLMtSuVxOdEnncI7maZ/amaYpy7Lkum7U9/Vii2p4fmGAXK1Wlc/nVavVpk/4hOL63gBcj0SDXsMwph79CmA+XNfgtuCSiWbGXQTjMo7j6PDwcORxe3t7Q+dWnVZcgdO0SzoPumkIB2p1G2cAlm3b2tnZufLNRb1el23barfbUeu1pL7/hra3t1UsFod2Q3AcR7ZtR7NLxIGAF1gsic/eAGCxXRaMLpJcLnel0f/jGPa43/f92Pp3TrOk86Cbhu6ZCcblOI42NjZia03vDtzDbgRbW1vRZ3V/X2Gg63newJsS0zSnvlm5ju8NwOwR9AJYeIvSvcE0TRmGMXA6sLjGMoxa0nlWYybC7gdh/oZTf00bFF5crSzs6hD2o83n82q1Wn2D5oZ9XiaTmbrrw3V8bwBmj6AXwFxxHEftdluNRkP5fH6soCKO7g1XMewxt+d5chynpxvX3t6eXNeNgkPHcQYG4qMenQ/bn8SSzs1mU81mU7lcLvqM7vMalA+hYeeRz+dVqVSi779SqUTfc9g1rjsArVarUVB8FcPSM+73BmB+EfQCiFW4aEAmk5m4la/ZbEo6by30fV937tyZ6xHvYTB3eHioZrMp27Z7VgULVw3rDvZ2d3dVLpflOI4k6dGjRz1B+6j3HLXfNE1tbW2pXC7LNE3t7Oxof39f1Wp1JkGa7/u6d++efN+Xbds9+8LzHpQPo86jUqmo2WzK8zy1Wi1VKpWe8nRxIPSTJ0+uNIhtVHpGfW8A5l/iyxBfxenpqVZXV5d6GcOzszM9fPhQ9+/fZ4WbGJCfl3vw4IE++OCDoftd15Xv+7IsS8fHx6rVahMFBq7r9vzNxsaGarVa7APHMNo0fXoRn1HX2rwbVZfy+40kJLY4BYB08X1f9Xo9esRsWZay2Wxf699lLMvqCbTa7TYBb0Im+d4AYBEQ9AKIheu6fXOpWpbVszLYJIrFog4ODuJIGqbArAQA0oagF0AscrmcGo1Gz7ZwpP2kq405jjPWnLAAAIyLgWwAYlOtVqPlbtvtdjTyvt1uR6Pqw4FA4eIDruuq1WqpWCzKNE25rht1j2g2mzIMg1ZHAMCVEfQCiEU2m1WpVOrpgxvOn9u9VOz29rYMw4iC3lqtpmw2G83c0L1ymO/7qVn8AgCQLIJeAFdm27YymUzfoLPj4+OeeXbX19d7Vs4Kg+J6vR4dM89TlAEAFhdBL4ArK5fLff15pfPAdm9vL3rd3Uc3nNIMAIDrQNAL4ErCGRsutvI2m035vj9wMFq4ateg1bPCPr/hErYs8woAiAOzNwC4skEDzfb394cubuC6bk8wGwbOnudFc/0WCgWVSqXZJBgAsHQIegFciWVZarfbPdvC1truZW/Dacik8z684SwP3VOahTM3hAzD6Jv7FwCAadC9AcCVNRoN2bat27dvSzoPVi/21zVNU9lsVo7j6ODgQLZtR8Fx2AWi1WpF7yGdD3ybdI5fAAAGIegFcGWmaY7sinBxdodhXR8uutiKDADANOjeAEzil7+Ufud3pL//98//+8tfJp2iVNnY2Oh5HQ5mAwDgqgh6gXF95SvSSy9J778v/fVfn//3pZfOtyMWlmXp0aNH0WvP85i9AQAQC7o3AOP4ylekrmCsx6NH5/t/8pPrTVMKmaapnZ0dOY6jdrvdM8cvAABXQdALjPLLXw4PeEOPHp0f98UvXk+aUmzQvL4AAFwV3RuAUd54I97jAADAtSPoBUZpteI9DgAAXDuCXmCUCzMKXPk4AABw7Qh6gVHefTfe4wAAwLVjIBswyhe/KN29e/lgtrt3l2IQ2+rqqh48eDDR3wRBoKdPn+rFF1/UysrKjFK2XMjTeM1jfq6uriadBCB1CHqBcfzkJ8OnLbt7d2mmK3t3itbss7MzPXz4UPfv39eNGzdmkKrlQ57Gi/wElgNBLzCun/zkfFqyN944H7S2sXHepWEJWngBAFh0BL3AJL74Rem//JekUwEAACbEQDYAAACkHkEvAAAAUo+gFwAAAKlH0AsAAIDUI+gFAABA6hH0AgAAIPUIegEAAJB6BL0AAABIPYJeAAAApB5BLwAAAFKPoBcAAACpR9ALAACA1CPoBQAAQOoR9AIAACD1CHoBAACQegS9AAAASD2CXgAAAKQeQS8AAABS79eSTsBVBEEgSTo9PU04Jck5OzvTp59+qtPTU924cSPp5Cw88jN+5Gn8yNN4kZ/xG5Wn4e92+DsOXIeFDnp/8YtfSJJefvnlhFMCAAAm9Ytf/EKrq6tJJwNLYiVY4Nuszz//XD/72c/00ksvaWVlJenkJOL09FQvv/yy/uZv
/ka3bt1KOjkLj/yMH3kaP/I0XuRn/EblaRAE+sUvfqFf//Vf13PP0dMS12OhW3qfe+45ffnLX046GXPh1q1bVNYxIj/jR57GjzyNF/kZv8vylBZeXDdurwAAAJB6BL0AAABIPYLeBXfz5k3963/9r3Xz5s2kk5IK5Gf8yNP4kafxIj/jR55iHi30QDYAAABgHLT0AgAAIPUIegEAAJB6BL0AAABIPYJeAAAApN5CL06xzHzf19HRkWq1mur1etLJSYVyuSxJarVakqRKpZJkchZeWEal8zz1PE8HBwcyDCPZhKVINpvl+r8C13VVqVSUzWZlmqbq9bru3r2rXC6XdNIWnm3b2tjYkCStr6+Tp5gLBL0LqNls6vj4WL7vq91uJ52cVLBtW6VSKXpdLBYJKK7Itm3Zti3TNCWd52k+nydPY+I4jlzXTToZC833fbmuK8dxZJqmbNsmOLsi3/d17949ffTRRzIMQ81mU5ubm2KiKMwDujcsoEwmo0KhEAUTuBrf99VsNuX7frStWCzKdV15npdcwhac53lyHCd6vbGxoePj4wRTlB7c8Mbn8ePHCoJArVZLhUIh6eQsPNu2tbOzEz3RyWQy3OhibhD0ApKOj497AtzwhqI7EMZk6vW6dnd3o9ePHj2SZVkJpig9jo6OtL29nXQygD7ValW5XE6e50VPIrjuMS/o3oClZxiGOp1Oz7awsqY1PR6O48j3fdVqtaSTsvBc1yWIiNHR0ZHW19fVbrfVarV6ujlhMmHDQbPZlGmaMk0z6tZEmcU8IOgFBtjf31elUmHQ1RWFg9l831c+nyc/Y+D7vkzT5ClEDDKZjKRf3dxWq1Xl83luzqYUBr2GYUR5WyqVdOfOnb6GBSAJdG8ALgj7pNG/7+oMw1ChUIi6OaytrRGsXUH46BjxCFsjQ9vb29FTCUxva2sr+n/DMKIBg0DSCHqBLo7jaGNjo6cvKibn+75s2+4JHizL4sfvCprNZk8wgavrHmgpKXoSwQDW6QzrDmYYBnmKuUD3BuD/FwZjYQtvOEKefr2T8zxP5XJZxWIxCiTCAJguDtNpt9tqNptROQ3nky6XyzJNkxbgCYVdblqtVt/AVa756YQt557nRd0bpPN85YYN84Cgd4ExZVF8ms2mms1mNOpYOm8FoovDdDKZjHZ3d3uCh8PDQ2UyGQa0TMmyrJ68azabqlarPJWYkmEYfWU07D7Cjdn0SqVSdK1L5/WoZVk9QTCQlJWAGaMXTjj/6eHhoZrNpnZ3d1lF6Ap839edO3cG9uPj8pie7/uqVqvR63BkPAHF1YXXv+M42t3dVTab5WZiChfL6JMnT5i9IQbVajWqT8lTzBOCXgAAAKQeA9kAAACQegS9AAAASD2CXgAAAKQeQS8AAABSj6AXAAAAqUfQCwAAgNQj6AXQw/M85fN5ra2taW1tTfl8fuASotlsVrZtD32fcrmslZWVmaVzc3NTxWJxZu8PAEgXgl4AEdd1tbm5qbt376rRaKjRaMg0TW1ubkbL347LsixVKpUZpVTa29tTPp+f2ft3cxxH2Wz2Wj4LADAbLEMMQNL56lTZbFa1Wq1ndb9SqaSNjQ3l83k9fvx47BXVMplMLEuPuq6rYrGoVqvVs/06ViC0bVvValXr6+sz/ywAwGzR0gtA0nmAl8lkBgaThUJB6+vr2t/fTyBlySmVSup0Opd24wAALAaCXgCSzltULcsauj+Xy/V1cfB9X8ViUWtra9rY2JDjOD3vd7FPb/ex1Wq1Z1+5XNbGxoZWVlai7hT5fF7ZbFae52llZUUrKyvyfV9Sb5/iYrHY19Wh2Wz2fP5lnw0ASD+CXgCSzgew3b17d+j+jY0NNZvNnm1HR0cqFot6/Pixcrnc0EFvkqJ9jx8/Vr1el23b0fsVi0UdHh6qVqup0+moVCrJ933VajXVajWZpqkgCBQEwcDuFfl8vifglqRKpRK1Wl/22QCA5UCfXgCRdrs9dF/YwtqtUChE/XZLpZIcx1GlUlGpVOo5zvM8OY6jTqcjwzBkGIZKpZIODw9lmqaq1aparZZM05SkS1ucB7EsS4ZhyHGcKNA9OjrSwcHBpZ8dR59jAMBiIOgFIEkyTbNvsFi37qB0GMuyBrb0hq2qd+7c6dm+tbUl13VlGMbI9x5le3tbh4eHyuVyajab8n1fuVwuagEe9NkAgOVB9wYAks4D1otdBLodHR1N3ALbLZPJqNPp9Pyr1+tTv99FxWIxSn8Y/F7XZwMA5h9BLwBJ590TPM9TuVzu22fbtnzf7+u2cJHrugP7BWcymaj1ddA+3/eH9gUeVyaTkWEYcl1XjuNEC1dc9tkAgOVB0AtAkmQYhmq1mmzblm3b8jxPnuepWCyqXC6rXq/3DSKrVqtRQFksFuV5ngqFQt97m6apQqHQM9DNcRyVy+W+fb7vy3GcaGYG0zSj7a7rXhocFwqFKHgPW6Uv+2wAwPIg6AUQyeVyarVa8jxPm5ub2tzcVLvdVqvV6uvaYJqmtre3tb+/r7W1NR0fH6vRaAxdvKJSqSiTyWhzc1Nra2uqVCrRe4b/n81mo307OzuSfrXIxZ07d0a2NO/s7Mh13b7A+7LPvky1WtXKykoU0K+srGhjY2Pk3wEA5s9KEARB0okAkD6u6yqbzYoqBgAwD2jpBQAAQOoR9AKIleu60cISzIMLAJgXBL0AYlWpVLS2tibXdXVwcJB0cgAAkESfXgAAACwBWnoBAACQegS9AAAASD2CXgAAAKQeQS8AAABSj6AXAAAAqUfQCwAAgNQj6AUAAEDqEfQCAAAg9Qh6AQAAkHr/H4UTxndstqeiAAAAAElFTkSuQmCC\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA1kAAAGyCAYAAAAWHQ5bAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAABceElEQVR4nO3dT4wj553f/09rtjORBE1Xc5DDxhKiqY6Qa5Y9k+Swu0g8RRmYhccbmezGCvElsUhkE8DBJu5y+5LVJS3Se9gFdhGTvchFgJ1u1gqSgp1gh2Vv4PgSdJNxLkEChzVaOLIO8ZDVPZCNUf9k/g5ElZpNslnFLv7rfr+AgcRiNfnwqech61v1PN9nqdPpdAQAAAAASMQzsy4AAAAAAFwmBFkAAAAAkCCCLAAAAABIEEEWAAAAACSIIAsAAAAAEkSQBQAAAAAJIsgCAAAAgAQRZAEAAABAgn5l1gWYpF/+8pf66U9/qhdeeEFLS0uzLg4AAIig0+noyZMn+pt/82/qmWe4Hgxg8VzqIOunP/2pXnrppVkXAwAAjOEnP/mJXnzxxVkXAwBiu9RB1gsvvCCp+yV948aNGZfmMycnJ3r48KFeffVVLS8vz7o4c4/6iof6iof6iof6iof6iu/k5ETvvvuuvvrVr4a/4wCwaC51kBUMEbxx48bcBVnPPfecbty4wY9uBNRXPNRXPNRXPNRXPNRXfEGdSWKoP4CFxUBnAAAAAEjQTO9k+b6v/f19VatV1Wq1vudLpZIkqdlsSpLK5fJUywcAAAAAcc0syGo0Gjo8PJTv+2q1Wn3P27atYrEYPi4UCspkMgODMQAAAACYFzMbLphOp5XP52WaZt9zvu+r0WjI9/1wW6FQkOu68jxviqUEAAAAgHjmNvHF4eGhPM9TOp2WpDAYOx14nfX06VM9ffo0fHx8fCypO4n25ORkcoWNKSjLPJVpnlFf8VBf8VBf8VBf8VBf8VFXAC6DuQyyDMNQu93u2ea6riQNvPMV2NnZ0Ztvvtm3/eHDh2GmonnC0Md4qK94qK94qK94qK94qC8AuFrmMsgaZGdnR+VyWYZhDN1ne3tbv/d7vxc+Pj4+1ksvvaRXX3117lK412o1ZTIZUvpGQH3FQ33FQ33FQ33FQ33Fd3Jyovfee2/WxQCAC1mIIMu2bW1ubiqfz5+73/Xr13X9+vW+7cvLy3P54zav5ZpX1Fc81Fc81Fc81Fc81BcAXC1zH2Q5jqO1tbWRARYAAAAAzIO5Xow4mIcVBFi+75NdEAAAAMBcm3mQNWiNLKm7jlaj0VA6nZbnefI8T5VKRalUasolBAAAAIDoZjZc0PM8OY6jvb09NRoN2batO3fuKJvNyvd93b17V77vy7btnr/b2tqaUYkBAAAAYLSZBVmmaWpra2tg0DQohTsAAAAALIKZDxcEAAAAgMuEIAsAAAAAEkSQBQAAAAAJIsgCAAAAgAQRZAEAAABAggiyAAAAACBBBFkAAAAAkCCCLAAAAABIEEEWAAAAACSIIAsAAAAAEkSQBQAAAAAJIsgCAAAAgAQRZAEAAABAggiyAAAAACBBBFkAAAAAkCCCLAAAAABIEEEWAAAAACSIIAsAAAAAEkSQBQAAAAAJIsgCAAAAgAQRZAEAAABAggiyAAAAACBBBFkAAAAAkCCCLAAAAABIEEEWAAAAACSIIAsAAAAAErTU6XQ6sy7EpBwfH2tlZUVHR0e6cePGrIsTOjk50YMHD3Tv3j0tLy9P9L2+8pWv6OjoaKLvMWmdTkcff/yxnn/+eS0tLc26OHOP+oqH+oqH+oqH+oqv0+noyZMn+uijj/TKK6/omWe4Hgxg/q2srOjtt98OH//KDMuCKTg6OtL7778/62IAAAAAl9b9+/d7HnN5CAAAAAASRJAFAAAAAAkiyAIAAACABDEnC0M5jqNWq6V6va5cLifLsmZdJAAAgLnG+RMkgixI8n1frusqnU7LNE1JUqPRkCTl83n5vq9bt26p3W7PspiYgUFtA8Bw9BkgefPYr4aVifMnBBgueMW5rivXdWVZljzPU6FQkCS1Wi3VajVJkmEYSqVS4RcHroZhbQPAYPQZIHnz2K/OKxPnTwgQZF1hvu+rVqspm83KMAxZlqVMJiPbtmVZlsrlcrhvq9VSOp2eYWkxTee1DQD96DNA8uaxX40qE+dPCBBkXWHBlZjTLMtSpVLp2VYoFLS7uzvNomHGorYNAF30GSB589iv4pSJ86erbanT6XRmXYhJOT4+1srKio6OjnTjxo1ZFyd0cnKiBw8e6N69e1peXp7oe92/fz/2YsRLS0tqt9syDEOO40iSstnsJIqHBXO6bQAYjT4DJG8e+9XZMnH+dPWcPecm8cUVV6lUlEqlJHVvaQcZcFqtlg4PD8Nb4Y1GQ4ZhhJM7XdeV7/tqtVrK5/OSurfQ7969q3q9PpsPg0SNahscf6DXeX3GMAy+N4ExzONvUZS+Puj8ie+Aq4Ug6wrLZDIqFos9Y4VPT97M5XLh//u+r+Cmp+d5SqVSMk1TmUwm/KJwXTf80sFiG9U2OP5Ar/P6jGmafG8CY5jH36IofX3Q+RPfAVcPc7KuKNu2lU6n+yZjHh4eyrIsmaapdrsd/js9qtTzPKXTaTmO0/P3tVpNmUxmap8BkzGqbXD8gV6j+ozE9yYQ1zz+FkXp68POn/gOuII6l9jR0VFHUufo6GjWRenxySefdN59993OJ598MvH3+uIXvzhwu6ROvV7v224YRqdarUZ67XQ63anVakMfYzFFbRscf6ArzvfpIvebarXaKZfLnXw+P7TM1Wo13G9RPhfm0zz+FnHuhPOcPecmyJqBWQdZtVqtMyi+rtfrA7cP0m63+/a95DH7wmq3251isdgpFosj943aNjj+uMwm0WeC113UflOv18OTyHa73TEMo2+fZrPZyefz4WPLsqZWPkxGtVqdSQAwj79FnDtdXdlstlOv1zv1er2ztbU1dL+z59wMF7yiBq2YvrOz07O2w3k8z+tb4fx0UgzMD9d19fjx48j7R2kbHH9cZpPoM9Ji95soC6wGk/0DQQIALCbf97WzsyPf92fy/vP4W8S509XkeZ7u3r0r27a1vb0d+e9IfHEFWZalVqvVsy1INRpMxBzFMIyeH9O9vb1wjLHneckUFInIZrNqtVqRfiijtg2OPy6zSfQZaXb9xvf9yKmuh+1rWVY450QavMBqs9nUzZs3w8epVGpmJ+i4uP39fW1ubs7kvefxt4hzp6tre3t7rFT8BFlXVL1el23b4Q+iYRiqVquR/940Td2+fVulUkmmaWpzc1M7OzuqVCqRv2wwn6K0DY4/8Jmo36ez6De2batYLEbev1KpKJvNDrxiH4izwOrZk1IshkajIcuywiBiFubxt4hzp6vp4OBA0mffZ1GPFYsRz8C8L0a8SHzf1/7+vqrVajiUZZRSqRT+/+PHj/tOQKI+32w2JSnyMIFhRn2GUqkUXvnyfV9bW1ux36NSqYz9t5fRuO3mosdhHsyizwTtzzAMNZtNbW9vX2gRUfpMNJVKJcwWG0
ehUBj6vXbeAqtn6yyXy6lQKPTcAZu1XC6nzc1NmabZ1waj1lMmk+lpd1Fec9F+NxzHUTabDYMBFtS9uuL+ZkTpD+P8Dl2kjEn+fq+tralerw/8DWMxYlwajUajZyHCKHK5XM/6FJVKpedK76jnz14VLhQKfT+4SX6G4If59Hoap0+ASqXSwLkjN2/eXNiTw7Nj1pM2TrsZdRySMo+f/aJ9plQqKZ/P9/zAvfHGG7Gu/sb5DFexzwzieZ7q9fpYV8dzuZxKpVJffQxbYDUIoC3Lkm3bPWWIE2BNuv1L3fYz6O5MNpuN1CYdx+mbOzPqNRftdyMIrBzH0cHBgZrNpkzT7Bseitmbx9+MUf1hnNe8SBkv2h+CfhD0YcMwwnT8I002H8dskV1weAr3y6RarXbS6fTI/ZrNZkdSp91uh9uCTD/tdnvk8+12u2NZVs/zQVahZrM5kc9gGEbP+3U642UiKpfLkTKlzYPT2ckmKWq76XSSOw6jzNtnv2if6XQGZ5hLIuscfeZ8W1tbF/peOlu3zWazYxhG+O90nZqmGdb56RTuUVNaB6bR/gcd03K5HOlv2+12p1wu97Wn815zkX83Op1uO4p7HDE98/ab0elE72NxXjOKSfWHer3ek2HTNM2h+5JdEFdWMKn0bPYrqbuQ4Kjnz+4n9d76nkR5h01Cj5OFyHVd1Wo11Wq1mY6vX1RRjoPrunIcR5VKJXzO932tr69Pq5gTkUSfMQxDmUwm7COTvPJKn/mM67p99RynnZqm2ZM98LwF6pvNZljn2WxW2WxW+Xx+LoeYnS2T67q6fft2pL/d39/XxsZG7NdcxN+NYH/XdbW3t9dT/sv6fYdkXKSPJS2J/pBOp+X7vhzHkW3bse5AM1wQV8bpH7azHe70sJbznm+32z3bg046iZPGYZmGDMOI9eN8NisY4hl1HDzPUyqVkmmaPcPmXNdVKpWaZlETd9E+I0m7u7taX1/X6uqqtra2tLa2lvgwy7PvedZV6zNBmxy0LWo7zWQycl330g0RO/1d7Xle5CGNrusO3e+81zQMYyF/N6RuP6jX632vf1m/75CMcfvYJCTVH4LAMe6FI4IszEShUIi03/r6emIZd0zTlGVZcl037Cinr2SMen6QYH2Mi0zijyuVSpGxaw4ExyH4ASmVSj0npLVaTZlMZoYlvLgk+oxhGOHVv1KpJMuytLGxQZ+ZIN/3+07g47bTVCoVJmm4rIrFYuSAP6jTUSdmUV5zkX83LvP3HZIXp49N07R+EwiyrqClpaWpvE/nnMSVs+p0tVpNtm2r1WqFV+Okz668jHr+NNu2tbm5eW4QGNxeTvJkJckvhlm3hUHBdjAp9bRMJjPwCpLjONrb2xv5/tvb24lfkQ+OQ3CFbm9vr2dy++HhoXK53NC/X5TPftE+Y9u2MpmMqtWqPM9TLpfT+vr60D4x731Gmn2/GVVHnuf1ncDHbaemaUZqX+O6aPuXLtYHzi6kfJ6o6bWjvOao341JtH8puT4wzvddlM80731Kmn0ZF+U3IxCnj51nEX4ThiHIuoLOC36ugtM/DMFVydPjhUc9L3U7/dra2sgf3otkZBo2lGTQVepxzbotDAq242TtC+Z/TFKU4+D7friuTODs47MW4bMHxu0zwXj4oB5M01S9Xtf6+nqYIvqsee8z0uz7zbh1FKedBkHzpFy0/UsX6wPlcllra2sj92s0GpHnk4x6zSi/GxfN4jeNPhD3+y7KZ1qEPjXrMi7Sb4YUvY+Nsgi/CcMQZGEmZjFcUOr+EJzurMEwp+Cq76jng23SZ+lAg7ShgzpsOp0eO1V1sMbEoEQBizxfZNFEOQ5nn2s0GuHj8+ZyLIKL9JnDw8OBQ6LO6//0mdFG1dGwu1Bx2qnv+4mcIM0r13UjJWpotVpqNBrh935wNX3Q+lHnvWbU342LtH9pOn0g7vfdRT/TNCxCGRdN1D42yiL/JpBdEBcSZBgqFAqxMheVy+VI/6IEWMNu+3qe17NIqtRd/+V0Ocvlcs9V+FHPNxqN8KQymNBZqVQufMV32GfY3t7uKY/jOKwKn5A47WbUcTAMoyeY2NvbCwOPYRNvZ2lafSZYT+nsPJZ6vX7hK6r0meFM0xzY7uK000FDDi+TYVkuz/YBy7K0tbUV/gsuEGxtbfW14WGvedl+Nxbt+25ejXv+NAtxfjNOP3fe3aKkh+zN5W9C5ETxC4h1sqKtk9VutzvVajX2mh31ej1cP6PdbncMwxirjONqNpudYrHYSafTHUl963mUy+W+9QxqtVqnWCx2yuXywHVkzns++IyS+v5N6jN0Ot01J4K1Z7a2tsZ+ryjGbQtJmvS6H+O0m05n9HHI5/PhPvV6vZPNZiOvv3P6NSZp2n2m0+m2qa2trXCfYrHYt2ZJkp+h05lun+l05qPfnDVsLbKo7TSbzV7oOI1jWmv+dDrdtW7q9Xrf9mH9v9PprsOTzWbDdnd67Zxhr3lZfzeS+L47zzz2qbMuUsakzp/m8TcjMKyPRWm/SZax05neb8LZc26CrBmYpyCrVqt1qtVqp91ud2q1WqwOe3b/YR0Ki+EibSFJs3rfeXCVP/uimpd+c1axWOwLAuLIZrMJliaaeak7zNa89qnTLlrGpM6f5rFurjKCrDkwL0FWcIX5tItE+dO+k4XkJN0WLmKer1xO2lX+7ItonvrNIOMGShcN0MZF+8e896lOZzJlHPf8iT4zX86ec890Tpbv+6pUKkPXViiVSqpUKqpUKkPHfC6cTz+VfvjD7v//8IfdxzMSrOZ+mmVZPau4R1UoFLS7u5tU0TBlSbaFi5pGxp95dZU/+yKap34zyObmphzHifU3vu/r8ePHM0kUQvvHvPcpKfkyXuT8iT4z35Y6ndnkpGw0Gjo8PJTv+9rb2+tbVTwIqra2tiR1G3W1Wo2V3vX4+FgrKys6OjrSjRs3kiv8uN55R/ra13Ty+LEefPe7uvc7v6PlmzelP/oj6bXXJvKW9+/f1/vvvx/rb5aWltRutyNPeg5+xKeZGhTTEbctAJivfuM4jtLpdOSTsVKpFP7uAvNinvrUMOOUkfOny+XsOffMUrin02ml0+mhV9l2dnb06NGj8LFlWcpkMnO5cnQk77wjZbNSpyM9++xn2z/8sLvdcSYWaJ3ndIajVqsVXr1stVoyDCM8PsGCo67rqtlsqlAoyDRNua4rwzDCLGKGYXBlZUGNaguSRrYH4KqZ934T9+SNAAuzNu99Kqkycv50Bcxk0OIp1Wq1k06ne7Y1m82BmXckxRonPjdzsv6//6/TefHFTqcbYnU+efbZ7pysZ5/tblta6nReeqm7X8LOm5NlWVbfRMt8Ph/WfblcDrNL5fP5cHy/ZVlhRh3DMMJ/c9CcMKZRbaHTGd0egKuGfgMkaxH6VBJl5Pzpcjp7zj2XixEPW2fBMIy+9VZOe/r0qZ4+fRo+Pj4+liSdnJzo5OQk0TLG8sMfSo8fh3ewTs78V5L0s59JP/iB9Ou/nuhbd4aMBrVtO7ybeNrh4WF4RSaVSoVXZDzPC9cHqdVq4f7tdjvR8
[... base64-encoded image/png payload omitted: inline PNG of a matplotlib figure from the notebook output ...]",
 "text/plain": [
  ""
 ]
" ] @@ -39564,14 +5552,35 @@ }, { "cell_type": "code", - "execution_count": 141, + "execution_count": null, "id": "62135a2c", "metadata": {}, "outputs": [], "source": [ "eq_compl_5 = epde_search_obj.get_equations_by_complexity(5)[0]\n", - "eq_compl_3_5 = epde_search_obj.get_equations_by_complexity(3.5)[0]\n", - "eq_compl_7 = epde_search_obj.get_equations_by_complexity(7)[0]" + "eq_compl_4 = epde_search_obj.get_equations_by_complexity(4)[0]\n", + "eq_compl_10 = epde_search_obj.get_equations_by_complexity(10)[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "243b7813", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 48, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "eq_compl_4" ] }, { @@ -39586,129 +5595,82 @@ }, { "cell_type": "code", - "execution_count": 147, - "id": "a556d63f", + "execution_count": null, + "id": "43f92e40", "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from epde.interface.solver_integration import BoundaryConditions, BOPElement\n", + "\n", + "def get_ode_bop(key, var, term, grid_loc, value):\n", + " bop = BOPElement(axis = 0, key = key, term = term, power = 1, var = var)\n", + " bop_grd_np = np.array([[grid_loc,]])\n", + " bop.set_grid(torch.from_numpy(bop_grd_np).type(torch.FloatTensor))\n", + " bop.values = torch.from_numpy(np.array([[value,]])).float()\n", + " return bop" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a556d63f", + "metadata": { + "scrolled": true + }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Using explicitly sent system of equations.\n", - "dimensionality is 1\n", "grid.shape is (320,)\n", - "Shape of the grid for solver torch.Size([320, 1])\n", - "Grid is torch.Size([320, 1])\n", - "torch.Size([1])\n", - "[2023-10-27 18:55:25.204678] initial (min) loss is 114.25733184814453\n", - "[2023-10-27 18:55:25.338582] Print every 1000 step\n", - "Step = 0 loss = 114.257332 normalized loss line= -0.000000x+1.000000. There was 1 stop dings already.\n", - "[2023-10-27 18:57:06.004703] Print every 1000 step\n", - "Step = 1000 loss = 0.000560 normalized loss line= -0.002775x+1.563721. There was 1 stop dings already.\n", - "[2023-10-27 18:57:40.499169] No improvement in 100 steps\n", - "Step = 1356 loss = 0.000065 normalized loss line= 0.060959x+4.262471. There was 1 stop dings already.\n", - "[2023-10-27 18:57:50.585647] No improvement in 100 steps\n", - "Step = 1456 loss = 0.000622 normalized loss line= 0.017430x+-0.053238. There was 2 stop dings already.\n", - "[2023-10-27 18:58:00.498611] No improvement in 100 steps\n", - "Step = 1556 loss = 0.174487 normalized loss line= 0.000057x+-0.000060. There was 3 stop dings already.\n", - "[2023-10-27 18:58:17.368791] No improvement in 100 steps\n", - "Step = 1731 loss = 0.195137 normalized loss line= -0.000015x+0.002732. There was 4 stop dings already.\n", - "[2023-10-27 18:58:36.462726] No improvement in 100 steps\n", - "Step = 1930 loss = 0.000100 normalized loss line= 0.476587x+-7.774933. There was 5 stop dings already.\n", - "[2023-10-27 18:58:43.472264] Print every 1000 step\n", - "Step = 2000 loss = 0.000019 normalized loss line= 2.565460x+-41.852368. There was 6 stop dings already.\n", - "[2023-10-27 18:58:51.098915] No improvement in 100 steps\n", - "Step = 2078 loss = 0.001526 normalized loss line= -0.006493x+0.472462. 
 {
 "cell_type": "code",
 "execution_count": null,
 "id": "a556d63f",
 "metadata": {
 "scrolled": true
 },
 "outputs": [
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
 "Using explicitly sent system of equations.\n",
- "dimensionality is 1\n",
 "grid.shape is (320,)\n",
- "Shape of the grid for solver torch.Size([320, 1])\n",
- "Grid is torch.Size([320, 1])\n",
- "torch.Size([1])\n",
- "[2023-10-27 18:55:25.204678] initial (min) loss is 114.25733184814453\n",
- "[2023-10-27 18:55:25.338582] Print every 1000 step\n",
- "Step = 0 loss = 114.257332 normalized loss line= -0.000000x+1.000000. There was 1 stop dings already.\n",
- "[2023-10-27 18:57:06.004703] Print every 1000 step\n",
- "Step = 1000 loss = 0.000560 normalized loss line= -0.002775x+1.563721. There was 1 stop dings already.\n",
- "[2023-10-27 18:57:40.499169] No improvement in 100 steps\n",
- "Step = 1356 loss = 0.000065 normalized loss line= 0.060959x+4.262471. There was 1 stop dings already.\n",
- "[2023-10-27 18:57:50.585647] No improvement in 100 steps\n",
- "Step = 1456 loss = 0.000622 normalized loss line= 0.017430x+-0.053238. There was 2 stop dings already.\n",
- "[2023-10-27 18:58:00.498611] No improvement in 100 steps\n",
- "Step = 1556 loss = 0.174487 normalized loss line= 0.000057x+-0.000060. There was 3 stop dings already.\n",
- "[2023-10-27 18:58:17.368791] No improvement in 100 steps\n",
- "Step = 1731 loss = 0.195137 normalized loss line= -0.000015x+0.002732. There was 4 stop dings already.\n",
- "[2023-10-27 18:58:36.462726] No improvement in 100 steps\n",
- "Step = 1930 loss = 0.000100 normalized loss line= 0.476587x+-7.774933. There was 5 stop dings already.\n",
- "[2023-10-27 18:58:43.472264] Print every 1000 step\n",
- "Step = 2000 loss = 0.000019 normalized loss line= 2.565460x+-41.852368. There was 6 stop dings already.\n",
- "[2023-10-27 18:58:51.098915] No improvement in 100 steps\n",
- "Step = 2078 loss = 0.001526 normalized loss line= -0.006493x+0.472462. There was 6 stop dings already.\n",
- "[2023-10-27 18:59:26.061702] No improvement in 100 steps\n",
- "Step = 2439 loss = 0.001744 normalized loss line= 0.161112x+-4.935905. There was 7 stop dings already.\n",
- "[2023-10-27 18:59:44.465505] No improvement in 100 steps\n",
- "Step = 2630 loss = 0.068704 normalized loss line= 0.000317x+-0.004612. There was 8 stop dings already.\n",
- "[2023-10-27 19:00:05.606421] No improvement in 100 steps\n",
- "Step = 2845 loss = 0.120123 normalized loss line= 0.000299x+-0.008078. There was 9 stop dings already.\n",
- "[2023-10-27 19:00:15.466881] No improvement in 100 steps\n",
- "Step = 2945 loss = 0.000015 normalized loss line= -1.213092x+1979.490185. There was 10 stop dings already.\n",
- "[2023-10-27 19:00:21.108867] Print every 1000 step\n",
- "Step = 3000 loss = 0.000005 normalized loss line= -4.076946x+6652.649545. There was 11 stop dings already.\n",
- "[2023-10-27 19:00:40.201630] No improvement in 100 steps\n",
- "Step = 3167 loss = 0.001589 normalized loss line= 0.012293x+-0.385413. There was 11 stop dings already.\n",
+ "target_form shape is torch.Size([320, 1])\n",
+ "[2024-04-10 13:54:00.819254] initial (min) loss is 157.35760498046875\n",
+ "[2024-04-10 13:55:28.529477] No improvement in 1000 steps\n",
+ "[2024-04-10 13:55:28.529590] Step = 6462 loss = 0.091874 normalized loss line= -0.000095x+0.885530. There was 1 stop dings already.\n",
+ "Step = 10000 loss = 0.071435.\n",
+ "Step = 20000 loss = 0.012310.\n",
+ "[2024-04-10 13:58:51.330534] No improvement in 1000 steps\n",
+ "[2024-04-10 13:58:51.330632] Step = 21213 loss = 0.009663 normalized loss line= 0.000176x+1.206889. There was 2 stop dings already.\n",
+ "[2024-04-10 13:59:33.872058] No improvement in 1000 steps\n",
+ "[2024-04-10 13:59:33.872400] Step = 24264 loss = 0.008175 normalized loss line= 0.004258x+1.090927. There was 3 stop dings already.\n",
+ "[2024-04-10 13:59:51.966097] No improvement in 1000 steps\n",
+ "[2024-04-10 13:59:51.966425] Step = 25590 loss = 0.011441 normalized loss line= -0.000267x+0.801283. There was 4 stop dings already.\n",
+ "[2024-04-10 14:00:05.187113] No improvement in 1000 steps\n",
+ "[2024-04-10 14:00:05.187217] Step = 26590 loss = 0.007728 normalized loss line= 0.000266x+1.068560. There was 5 stop dings already.\n",
 "Using explicitly sent system of equations.\n",
 "grid.shape is (320,)\n",
- "Shape of the grid for solver torch.Size([320, 1])\n",
- "Grid is torch.Size([320, 1])\n",
- "torch.Size([1])\n",
- "[2023-10-27 19:00:40.696888] initial (min) loss is 196.3807830810547\n",
- "[2023-10-27 19:00:40.807497] Print every 1000 step\n",
- "Step = 0 loss = 196.380783 normalized loss line= 0.000000x+1.000000. There was 1 stop dings already.\n",
- "[2023-10-27 19:01:50.616373] Print every 1000 step\n",
- "Step = 1000 loss = 0.000165 normalized loss line= -0.473463x+34.612788. There was 1 stop dings already.\n",
- "[2023-10-27 19:02:09.853673] No improvement in 100 steps\n",
- "Step = 1280 loss = 0.000517 normalized loss line= 0.011158x+-0.099722. There was 1 stop dings already.\n",
- "[2023-10-27 19:02:42.467247] No improvement in 100 steps\n",
- "Step = 1771 loss = 0.000380 normalized loss line= 6.184229x+-198.458141. There was 2 stop dings already.\n",
- "[2023-10-27 19:02:57.458969] Print every 1000 step\n",
- "Step = 2000 loss = 0.000049 normalized loss line= -0.002891x+1.379557. There was 3 stop dings already.\n",
- "[2023-10-27 19:03:24.892572] No improvement in 100 steps\n",
- "Step = 2389 loss = 0.000172 normalized loss line= 0.000041x+0.206047. There was 3 stop dings already.\n",
- "[2023-10-27 19:04:07.555050] Print every 1000 step\n",
- "Step = 3000 loss = 0.000020 normalized loss line= -0.000393x+1.070815. There was 4 stop dings already.\n",
- "[2023-10-27 19:04:14.489689] No improvement in 100 steps\n",
- "Step = 3098 loss = 0.000295 normalized loss line= -0.000023x+0.069003. There was 4 stop dings already.\n",
- "[2023-10-27 19:04:46.576217] No improvement in 100 steps\n",
- "Step = 3576 loss = 0.000361 normalized loss line= 0.000791x+0.022618. There was 5 stop dings already.\n",
- "[2023-10-27 19:05:14.095773] Print every 1000 step\n",
- "Step = 4000 loss = 0.503594 normalized loss line= -0.000000x+0.000031. There was 6 stop dings already.\n",
- "[2023-10-27 19:05:18.452670] No improvement in 100 steps\n",
- "Step = 4062 loss = 0.000275 normalized loss line= 4.603574x+-146.850933. There was 6 stop dings already.\n",
- "[2023-10-27 19:05:53.898385] No improvement in 100 steps\n",
- "Step = 4599 loss = 0.000158 normalized loss line= -0.000026x+0.082744. There was 7 stop dings already.\n",
- "[2023-10-27 19:06:21.865832] Print every 1000 step\n",
- "Step = 5000 loss = 0.000011 normalized loss line= -0.000432x+1.073953. There was 8 stop dings already.\n",
- "[2023-10-27 19:06:28.546415] No improvement in 100 steps\n",
- "Step = 5092 loss = 0.000226 normalized loss line= -0.000020x+0.048638. There was 8 stop dings already.\n",
- "[2023-10-27 19:06:56.960076] No improvement in 100 steps\n",
- "Step = 5514 loss = 0.000100 normalized loss line= 1.780045x+116.174507. There was 9 stop dings already.\n",
- "[2023-10-27 19:07:25.613212] No improvement in 100 steps\n",
- "Step = 5957 loss = 0.000519 normalized loss line= 0.077216x+-2.463396. There was 10 stop dings already.\n",
- "[2023-10-27 19:07:28.443307] Print every 1000 step\n",
- "Step = 6000 loss = 0.000012 normalized loss line= 3.245062x+-103.526684. There was 11 stop dings already.\n",
- "[2023-10-27 19:07:49.922269] No improvement in 100 steps\n",
- "Step = 6351 loss = 0.025929 normalized loss line= 0.022275x+-0.682806. There was 11 stop dings already.\n",
+ "target_form shape is torch.Size([320, 1])\n",
+ "[2024-04-10 14:00:05.251571] initial (min) loss is 176.67147827148438\n",
+ "Step = 10000 loss = 0.002977.\n",
+ "[2024-04-10 14:02:46.933082] No improvement in 1000 steps\n",
+ "[2024-04-10 14:02:46.933183] Step = 15253 loss = 0.002593 normalized loss line= -0.003300x+0.719203. There was 1 stop dings already.\n",
+ "[2024-04-10 14:02:58.608506] No improvement in 1000 steps\n",
+ "[2024-04-10 14:02:58.608857] Step = 16347 loss = 0.000767 normalized loss line= 0.007766x+4.217888. There was 2 stop dings already.\n",
+ "[2024-04-10 14:03:09.341210] No improvement in 1000 steps\n",
+ "[2024-04-10 14:03:09.341314] Step = 17347 loss = 0.001025 normalized loss line= -0.001974x+1.192381. There was 3 stop dings already.\n",
+ "[2024-04-10 14:03:21.366159] No improvement in 1000 steps\n",
+ "[2024-04-10 14:03:21.366516] Step = 18491 loss = 0.002860 normalized loss line= -0.202613x+39.093391. There was 4 stop dings already.\n",
+ "[2024-04-10 14:03:32.065374] No improvement in 1000 steps\n",
+ "[2024-04-10 14:03:32.065711] Step = 19491 loss = 0.001384 normalized loss line= -0.000223x+1.318539. There was 5 stop dings already.\n",
 "Using explicitly sent system of equations.\n",
 "grid.shape is (320,)\n",
- "Shape of the grid for solver torch.Size([320, 1])\n",
- "Grid is torch.Size([320, 1])\n",
- "torch.Size([1])\n",
- "[2023-10-27 19:07:50.339617] initial (min) loss is 223.90512084960938\n",
- "[2023-10-27 19:07:50.525267] Print every 1000 step\n",
- "Step = 0 loss = 223.905121 normalized loss line= -0.000000x+1.000000. There was 1 stop dings already.\n",
- "[2023-10-27 19:10:12.435790] Print every 1000 step\n",
- "Step = 1000 loss = 0.000113 normalized loss line= -0.003690x+0.832294. There was 1 stop dings already.\n",
- "[2023-10-27 19:10:39.247605] No improvement in 100 steps\n",
- "Step = 1203 loss = 0.000278 normalized loss line= -4.719180x+555.937133. There was 1 stop dings already.\n",
- "[2023-10-27 19:11:19.715492] No improvement in 100 steps\n",
- "Step = 1509 loss = 0.000036 normalized loss line= -0.126788x+32.126369. There was 2 stop dings already.\n",
- "[2023-10-27 19:11:45.376907] No improvement in 100 steps\n",
- "Step = 1709 loss = 0.000870 normalized loss line= -0.264556x+112.970188. There was 3 stop dings already.\n",
- "[2023-10-27 19:11:57.343735] No improvement in 100 steps\n",
- "Step = 1809 loss = 0.000021 normalized loss line= -0.461683x+33.675682. There was 4 stop dings already.\n",
- "[2023-10-27 19:12:09.883817] No improvement in 100 steps\n",
- "Step = 1909 loss = 0.000964 normalized loss line= -0.000103x+0.224012. There was 5 stop dings already.\n",
- "[2023-10-27 19:12:21.090062] Print every 1000 step\n",
- "Step = 2000 loss = 0.000263 normalized loss line= -0.000378x+0.820198. There was 6 stop dings already.\n",
- "[2023-10-27 19:12:22.330148] No improvement in 100 steps\n",
- "Step = 2009 loss = 0.000816 normalized loss line= 0.024140x+-0.199091. There was 6 stop dings already.\n",
- "[2023-10-27 19:12:50.746214] No improvement in 100 steps\n",
- "Step = 2247 loss = 0.000360 normalized loss line= 7.732536x+-208.933284. There was 7 stop dings already.\n",
- "[2023-10-27 19:13:02.919363] No improvement in 100 steps\n",
- "Step = 2347 loss = 0.000847 normalized loss line= -0.382231x+26.921681. There was 8 stop dings already.\n",
- "[2023-10-27 19:13:27.492650] No improvement in 100 steps\n",
- "Step = 2537 loss = 0.000111 normalized loss line= 0.017029x+0.788984. There was 9 stop dings already.\n",
- "[2023-10-27 19:13:46.896315] No improvement in 100 steps\n",
- "Step = 2685 loss = 0.387056 normalized loss line= -0.000001x+0.000524. There was 10 stop dings already.\n",
- "[2023-10-27 19:13:59.221020] No improvement in 100 steps\n",
- "Step = 2785 loss = 0.000017 normalized loss line= 78.509820x+-1951.388910. There was 11 stop dings already.\n"
+ "target_form shape is torch.Size([320, 1])\n",
+ "[2024-04-10 14:03:32.156476] initial (min) loss is 172.03207397460938\n",
+ "Step = 10000 loss = 0.063868.\n",
+ "[2024-04-10 14:07:53.064450] No improvement in 1000 steps\n",
+ "[2024-04-10 14:07:53.064809] Step = 13566 loss = 0.064630 normalized loss line= -0.000104x+1.016804. There was 1 stop dings already.\n",
+ "Step = 20000 loss = 0.036300.\n",
+ "[2024-04-10 14:12:10.229525] No improvement in 1000 steps\n",
+ "[2024-04-10 14:12:10.229875] Step = 26977 loss = 0.011484 normalized loss line= -0.000380x+0.821335. There was 2 stop dings already.\n",
+ "[2024-04-10 14:12:29.904401] No improvement in 1000 steps\n",
+ "[2024-04-10 14:12:29.904725] Step = 27977 loss = 0.007514 normalized loss line= 0.005792x+1.068029. There was 3 stop dings already.\n",
+ "[2024-04-10 14:13:01.846003] No improvement in 1000 steps\n",
+ "[2024-04-10 14:13:01.846326] Step = 29542 loss = 0.009504 normalized loss line= -0.000994x+0.962567. There was 4 stop dings already.\n",
+ "Step = 30000 loss = 0.009400.\n",
+ "[2024-04-10 14:13:21.799038] No improvement in 1000 steps\n",
+ "[2024-04-10 14:13:21.799402] Step = 30542 loss = 0.007162 normalized loss line= -0.001932x+1.253448. There was 5 stop dings already.\n"
 ]
 }
 ],
@@ -39718,23 +5680,29 @@
 "source": [
 "sols = {}\n",
 "\n",
 "#get solution for equation with complexity 5\n",
 "pred_u_compl_5 = epde_search_obj.predict(system=eq_compl_5, boundary_conditions=[bop_u(), bop_dudt()], \n",
- " grid = [t_test,], strategy='autograd')\n",
+ " grid = [t_test,], mode='NN', use_fourier = True, fft_params = {'L' : [4,], \n",
+ " 'M' : [3,]},\n",
+ " compiling_params = {'tol' : 0.005})\n",
 "pred_u_compl_5 = pred_u_compl_5.reshape(pred_u_compl_5.size)\n",
 "\n",
- "#get solution for equation with complexity 3.5\n",
- "pred_u_compl_3_5 = epde_search_obj.predict(system=eq_compl_3_5, boundary_conditions=[bop_u(), bop_dudt()], \n",
- " grid = [t_test,], strategy='autograd')\n",
- "pred_u_compl_3_5 = pred_u_compl_3_5.reshape(pred_u_compl_3_5.size)\n",
+ "#get solution for equation with complexity 4\n",
+ "pred_u_compl_4 = epde_search_obj.predict(system=eq_compl_4, boundary_conditions=[bop_u(), bop_dudt()], \n",
+ " grid = [t_test,], mode='NN', use_fourier = True, fft_params = {'L' : [4,], \n",
+ " 'M' : [3,]},\n",
+ " compiling_params = {'tol' : 0.005})\n",
+ "pred_u_compl_4 = pred_u_compl_4.reshape(pred_u_compl_4.size)\n",
 "\n",
- "#get solution for equation with complexity 7\n",
- "pred_u_compl_7 = epde_search_obj.predict(system=eq_compl_7, boundary_conditions=[bop_u(), bop_dudt()], \n",
- " grid = [t_test,], strategy='autograd')\n",
- "pred_u_compl_7 = pred_u_compl_7.reshape(pred_u_compl_7.size)"
+ "#get solution for equation with complexity 10\n",
+ "pred_u_compl_10 = epde_search_obj.predict(system=eq_compl_10, boundary_conditions=[bop_u(), bop_dudt()], \n",
+ " grid = [t_test,], mode='NN', use_fourier = True, fft_params = {'L' : [4,], \n",
+ " 'M' : [3,]},\n",
+ " compiling_params = {'tol' : 0.005})\n",
+ "pred_u_compl_10 = pred_u_compl_10.reshape(pred_u_compl_10.size)"
 ]
 },
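The rewritten `predict(...)` calls above drop the old `strategy='autograd'` switch in favour of explicit solver options: `mode='NN'` selects the neural-network solver, `use_fourier=True` with `fft_params={'L': [4,], 'M': [3,]}` turns on Fourier feature embedding of the grid, and `compiling_params={'tol': 0.005}` loosens the compilation tolerance. A hedged sketch of the scoring step behind the MAPE printout in the next cell; the metric definition (mean absolute error normalised by the mean magnitude of the reference) is an assumption, only the printed format is taken from this diff:

    import numpy as np

    def mape(u_ref, u_pred):
        # One plausible definition of the reported "MAPE"; u_ref/u_pred are
        # 1D arrays on the test grid.
        return np.mean(np.abs(u_ref - u_pred)) / np.mean(np.abs(u_ref))

    # u_test and the pred_u_compl_* arrays are assumed from the cells above.
    for compl, pred in ((10, pred_u_compl_10), (5, pred_u_compl_5), (4, pred_u_compl_4)):
        print(f'MAPE on the test dataset for eq with $C = {compl}$ is {mape(u_test, pred)}')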
"iVBORw0KGgoAAAANSUhEUgAAAi8AAAGeCAYAAABcquEJAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAADODklEQVR4nOydd3gcV7m439mi3iWr2JKt4h5XyY6TkIQQLCckAQLYhMSQwCWR4NIuAWwMt+VSjA3ce+HCj9gJEByTxLaAAMGESGlOdWzJvUuraklWXfWyZX5/nJ3VyipW2dW28z7PPivNzsx+Z+fMme985yuKqqoqEolEIpFIJH6CztsCSCQSiUQikUwGqbxIJBKJRCLxK6TyIpFIJBKJxK+QyotEIpFIJBK/QiovEolEIpFI/AqpvEgkEolEIvErpPIikUgkEonEr5DKi0QikUgkEr/C4G0B3I3dbqe+vp7o6GgURfG2OBKJRCKRSCaAqqp0dXUxe/ZsdLrxbSsBp7zU19eTkZHhbTEkEolEIpFMgdraWtLT08fdJ+CUl+joaEA0PiYmxq3ntlgsvPTSS2zYsAGj0ejWc/sCgd4+kG0MBAK9fRD4bQz09kHgt9ET7evs7CQjI8P5HB+PgFNetKWimJgYjygvERERxMTEBGxnDOT2gWxjIBDo7YPAb2Ogtw8Cv42ebN9EXD6kw65EIpFIJBK/QiovEolEIpFI/AqpvEgkEolEIvErpPIikUgkEonEr5DKi0QikUgkEr9CKi8SiUQikUj8Cqm8SCQSiUQi8Suk8iKRSCQSicSvkMqLRCKRSCQSv0IqLxKJRCKRSPwKqbxIJBKJRCLxK2akttHOnTsBqKioAGDXrl0TOiYuLg4As9nMli1bPCafRCKRSCQS/8HjysvWrVvZsWOH8//CwkLy8/MpLi4e8xhN2SkoKACgpKSEwsLCCSk9EolEIpFIJo5dtXOpuZam7jYiQ8JYnJxFREgYly/DiRNgscCKFZCV5W1Jh/Co8mI2mykrK8NsNjutKIWFheTl5WEymcjOzh71uO3bt1NZWen8f/369eTn50vlRSKR+BVWKxhmxL4tkUweu2qn6EQJvzvyAnUdTc7tekWPofr9HH/641jMSc7t9y84ys/Dt5D0652wcqU3RHbi8dvq6NGjmEwmcnNzAZwKi9lsHnV/k8k0TNlxpaSkhPXr13tKVIlEIpkSnf3d7D9ezKvlRzG1NGLpDae7Zh71h24nvHU1t9yi4ytfgfe/39uSSiSCzv5uvvXXn/Fu9SkAIoxhZMSnUtfUSY/ahm3uK3zsvhd49EADP5/1K/aVr+GGS3tI4lVe/uzT3HDYu8qLRx124+LiaG9vdyouIBQQYEyri8lkGvNcYyk8EolE4i1eqyjl7ie/xv+9uY/TjRX0WntYcOUie0p+wt3524j6+Peo+0cxtttu59/vOkpnp7cllgQ7nf09fOHAD3m3+hRhhhC+9YEHefmLj7Om8Ue8863/R/kvHyO0az4fPX2edW1H+VHWFlqLy/hc+D4Alp15jn+59QTGk1VQXe2VNsy4QXP79u3s2rVrVMvKeCQkJNDW1jZi+8DAAAMDA87/Ox0jg8ViwWKxTEvWq9HO5+7z+gqB3j6QbQwEfKl9zx77Bz89tBcAQ3c6lQfvpq82h+8nfIfrK5q491gtZz9yjn9ZsZ/b3z7Lqb8/zU035fGPf1hJTh77vL7URk8Q6O0D322jzW7n0ed/ypkrJuLCo/nVx77Ngllz+eMfFbZuhblU8+0PdvDZO+7C9rOfA5Dx4qvwYh6q4xyzaOaJsrVQBvz7v2AZHHSLbJP5rRRVVdVr7+Yetm7dSmJi4riRQyUlJeTn53O1WPHx8ezYscPpxKvxn//5nzz22GMjzvPMM88QERHhHsElEonkKo53VvLHK+8CEFr5Ppoev4tUXQv3P3CBL/z5q4R2dNAfGcn/fuIGvnjgELF9Fpr1CdxhKyYtpYuHv12JLivey62QBBuvtp7i1bbThCgGHs5YT2poPPX1kXzjG++nr8+IiuLcVwUUl/ersev1HPvqV6lz03pob28vDzzwAB0dHcTExIy774wpL0VFRbS1tY1QPq7GZDKRk5MzQnlRFIXi4uIRPi+jWV4yMjJoaWm5ZuMni8Viobi4mPz8fIxGo1vP7Qv4QvtsdjtvnKrlH+800tMZys09bXz05R8R9rPtqHl50z6/L7TR0wR6G32hfReaq3nouf/Aardxne5DPPuNB1FdVuFVRUEZZWi9+iFge/9tqD8a2bd9oY2eJNDbB77ZxnNXKnlo339gV1X+644vcNfi96GqkJ+v59AhHQ+veo9d/Z9FuXQRxWa75vle++lPuf4LX3Bb+zo7O0lKSpqQ8jIjy0aan4umuJjNZtra2kb1e8nOziYuLm7UaKTRnHVDQ0MJDQ0dsd1oNHqsw3jy3L6AN9pns9t54tVXeKNoF18rPsTlu1dyNiOBW/5eRtR7l/jjg79iwR9+x/Ll7vm+QL+GEPht9Fb7+iwD/NuLv8Jqt5GXvIZnCj9DHqW0piwhseUi2GyjKi4wpLhYMPAKt3PH6y/Bs8/CDTeMur+8hv6Pr7TRrtrZ+foe7KrKnYtv4qPLbwNgzx44dAjCw+GnK/ei+905+NSn4LnnRp4Dh6OsTgd2O+De9k3mPB7PsFtWVkZZWRm5ubmYTCZMJhO7d+8mISEBEJYWLa+LxrZt25wKDwirzbUsNhL/pbO/h/t27eBXx57k7lMnuL6iiQeL61lVEcYdx2sAeH/9Pr71pR/yp3874jUHMYkE4Ml3/4Sp7TKzIuO58LsCBvp1/NvcPSReOQebNk3oHPv/6VFWcRwAy9PPQVkZlJbKvi3xGH85fYhTDeVEGMP4xm2fBqC3F37+jWpyKeWXny8j5u/CIZd//EO8K8qwd1NKDD/YuJb+FctQU1IYiI2d6WY48Xielw9+8IOYzWa2bt067DPN76WkpIRdu3YN84PZsmULO3fupKioCIAjR47IHC8BStdAL//8o0cxmCtZZDFyZ6nINfChc2f40LkzTgexhJ4BXnzju/DGd+H7wJEjsGaN1+SWBCd15ib2HP0bAB/uuZM9r1zixlCFe3pGGfRVdeS7g82/2en0LTC0N4PrstHMuSFKggSLzcqud8TztPDGT5AcJYwHv/wlHG3JFDv9giFlpb1dvGt9cc0aqKnh999/hD+aL1G1cSm/vOvr9L/yysw14io8qrxoodLjUVBQMKpVxVWZ2bhxo9tlk3gfi83KI3t/ynOP7XZuU5XhbmHKVe+a2fL01j0se1kqL5KZ5WdvPMOgzcLajGV87VP38jWAAWBwjEF/xQo4dQqWL4cvfhF1xw6orEQBFIdqrr1jMMBTT81cYyRBw1/OvE59ZwuJEbHct3oDAF1dsGMHHGcvT+s+i85uHak46/Xwu9/BAw/A4CAP93fw1988ynu1Z3mr4bwXWjKELMwo8Rr/88oBzrWfYevGW7AqQo8ey1dAoys8BICUN/Zweo80tUtmjvKWWl66IKKL5ps/w2b2YtHmf6MN+n
v3wrFjQqE5dgwKC1EqKqg9+MdRz2/+x2HYvNmTTZAEIVa7jSfffR6Az13/EcKNwkf017+G1lY4smAz9ncOj37we++JPqkoEBrKnNhkNud+CICfv/kcNtU+E00YFam8SLxCWd15fn/8LyytbePel3vp/3+/HX3HqywxMX0in0CSpYNlD+UJc2ZmpoellUjg14f/DMDt86/niZ3zeIbN7H90AoN+TMww34G5KfMAsDs22R3D8MGfnoXbb4ejRz3aDklw8XpFKfWdzcSFR7NxpQh6sdng5yKFC9/8pksJC51u+PsofH7dvcSGRTErKp5+m3vyu0wFqbxIZhyLzcq2Pz8Bikr+QQs3tpcS9brwI3DeNNpgv2QJfO5zzmOvXkayKQYxw5VIPEid+Qovnn8LgLkdH8Nkglmz4OMfd+wwgUHfSXIy9pRkzqcn8r2P59GUvZgGUrH+4xV49VV4+mnPNEISlDxbJvywPr78dqfV5a9/hcpKSEiAT38aSE6G1FThe/X44+I9NZXRMinGhEVy4KEd/PJjW4k0hM1kU4YhlRfJjPO3v/+e+EunWHBpgE/UnREbi4shMREWL4Yf/EBYVFJS4MUX4Te/GXM2uiFvB4cypKld4ln2Hy/GrqrcmLmCA7tEad0vfxnC50180HeSno6uuoYXf/c/vLU4jZ8/eD3fXfpH7rA5FPjnnoNjx4gtL5dLopJpUd5Sy5HaM+gUhU+uyndu/8UvxPt/feQoEffcDo2NUFUFhw9DYaF4r6qC9PRRz5sSneh54a+BrHcqmVG6Bnq598Of5V7H/04H3bY24TfQ2grf/a7IITA4CFoOH20/R34BLQlY4vUvs+sry7klYTvKj3fKCCSJ2+m3DPL86dcAuD7mDna9A0YjFBQAqelikA8JEX20oGB4vx2L0FA+u+6jPHr7g44NT2F32BPV5maM69ZxGwibvow+kkyRP50S0UC3zV9DWoyoDl1VBS+/LLrrZ9gzZO372c+GDnT4uPgy0vIimVGeLfsH2+5fh9VhXnc66KouERd79468eUYxa5pjo2iP1/PBvh+gvCbN7RLP8I8Lb9PR383smCTePrAagG994CipDzj8U0JDh+fDmOCgnxARw1+2PYJVJ47VadFHjnvBrtdjldFHkilisVn521mx1Hnvstuc2//0vyKvS+HaMmIOOkL8n/O/XEPS8iKZMXoH+/nNOwfpy82k7Owj/OPEwyN3OnwYXKqQO0kfPsNV7riD1lNHSHj5CT5aLyJA1GefQ3noIaEIJSXBvHmebZAkKPjjqVcB+MiS9XzrUaF0/3P0HviDQ2GehrVv9Zb/4tO91Tz3s5dGfHZo507e98ADUz63JLh5q/I47X2dJETEclPmSkAYtL/+s0y+DvAeQ0p3s//lGpLKi2TG+OOpV+izdzHQnMrduSvgBENppl3STY+J64w2K4scYB84E9n54w0o8W1qzY0cv3wBnaIQ/V4Wi3pKSU9XmP2Gy4x1GgpzRlwq18+9DngJu6KgU1Vs6NDjvRBUSWDw1zOHALhryfsw6sWj/s03YRd7eYrPYsQlr4vqf7mG5LKRZEawq3aeekd4vbe9dQ/3fSlt8o6Oruzd64zvG4pAumrpSSKZJn87+yYA189dxqe/uYpS1vDnujyU5maxg6YwTyNk//b33U1LdBjn0xN44aM/p5Q8WozeTb0u8W96B/t5o/IYAPcsvcW5ff9+eIbN/ODDY4T4H/afXENSeZHMCO9UnaS5/wqLy3t5teL7pKiT824fwebN4pjR8KMbUOK7qKrKC2ffAOB9abfw6dGS0l3tqzUFVl5/O1/48SPc/5UP0vDoIm42HGaOpZqznTnTbYIkSDlkKmPAamFuXCqLkzMBkdvFUXGHDRscO04mxN/H8D+JJX7JnveE1WXDS31c1/i68BWYoqPj1WgRS86kX9LiLnEDZ6+YqDVfIcwQStPR6/k9mylc6X6FWVEUPrbmTlAUXqos4RMbFQYJpWJfJ/oNG2TSOsmkKbko+un6hetQHOPjoUNw5YrI7bL27imE+PsYUnmReJyWsyfofOcVFte1cW+dMGW6xbvdEYGkrFnD7z7/Ec7OiacpNJaXT/nPDSjxXUouvgfALdmr+PMfRDKu9esdH7p5xvrh627FqDdwsbmGez4j7oelpS+he+01GUUnmRS9g/28YRLj7IZFNzi3a1aXj38cjFnp07N8+wDSYVficZKuW8Wzjr+1Srpuca51iUCaV1HK5kU/RukKJ+m5WeR/7ppHSyRjoqoqJZfE7DVv1jr+W0Scctsnk+H3qZCRAZ//vCgQU1s77RlrTFgUt2bncva9V1Frn+au1A/xicYD4sNpOgVLgot3q0/Rbx1kdsws55KRqsJf/iI+/9jHHDu6Wrr9IK/L1UjlReJRVFXlx5+9i0f3/B2DXR1yqnWXd7vjhrslO5eUqBSuKE2odb+m94YiIn4hk9ZJpkZ5Sy017Y2E6I1cKVuNqsINN8Ds66eYlG4C3L30Zv773m8AL/Bp/s2ZtE5G0UkmwyFTGQC3zc9zLhkdOwZ1dRARIcpnBQJy2UjiUc5dqeT310XzwBfvHH0HNznX6nU6Nq8R6a8fMvyWiMMyaZ1k6rxSfgSAGzNX8LfnwwH4xCccH7rJV+tqbslazX89+P4RSevc4RQsCQ7sqt25ZHRr9lC+LM3qcscdEOa9ckRuRSovEo/y4vl3AOgxLREbPOXdXl3NRwdiWFrXwd2mU4BIWudvWSMlvsGhCjF7vWFOHq+/LrZ99KOe/c4QgxHl059m81fWj76DjKKTXINzVypp6TETYQwjL32Jc/tf/gJ5HOWX5wKnarlcNpJ4DFVVeeGUyH7b0PB+1JRnUOa611fASWYmccCzyKR1kunR2tPBmUYTAIOVq7BYYOFCWLDA899995KbOf23/QDYUdChYkeHTiatk0wAzepyY+YKQgxGABoaxLLRz9lD2vnpZ4X2FaTlReIxzl2ppHWgGftgKEtX3IFSXeU573aZtE7iJt6qPI6KyuLkTF5/MYE8jvJC38zMWFfNWUhI2hxaosOon5fBF5VfcZQ8rEn+FcYq8Q5vVZ4A4ObsVWJDdTVHd5eymjI2G/y3jtFoSMuLxGO8dEFYXTrPrWLTxlBwdQ1wt3f75s2wZMlwS4vGWPWSJJJR0GavN2et5nvfhH9lDwtqZ2bGqlN0rHvfndz5nW5yYjLo/lsB6w4W8h8PD/Kf6f4VDSKZWTr7ezjdWA7ATfNWiI2ZmXwY+DCgWgPLAVxaXiQe4+9nhNNj/4V1QxkdZwC7w5nS5rTBSCQTw2q38U71SdLae1h42kh6cxmfUmZ2xnrHohuxGPSU913h3o29gELRX6XiIhmfI7VnsKsqmQmzSY1JAsC+Zygr9KiRnn5skZaWF4lHqGqrp7G3AbtVz02ZqwgPn4EvdSStG0hN5ifZRj52uJLUJiNJ0twumSBnr5joGujlzR++ALzAHYCqzuyMdX5SBnNik7nc0UTi8lMYDDdy5gxcvCh8bySS0Xi3SgQq3DBvuXNb6eLNFLKEMgLPIi0tL
xKPcMh0jKW1bez6+Ts8suzszHypI2ldWOkxyu66nc1fW8/y1b/hXJf/ZI2UeBftAfD01x/AqnhnxqooCrdli4fN0SulfOADYvuf/uSxr5QEAO9Wi757o4vyUlLisoMf1zEajcBohcTnKD5byodLq7ipoZIP1M1gvpXQUBSdjg8vex8oCuErj8l0L5IJc9jxAOC+h7hJ773Cn7flCOXlDdMxPvoxKwDnnj4qMowFSKirxH00drZQY25Er+hYk7HUuf3116GJZHqi/buO0WhI5UXiXqqr6X77TSxlb3PHiRoAwv88897t6xeuAyAq5wzP/rHLX33SJDNI72A/x+svAmCvX45V6AyoXpixrkhbQIQ+lM6BHrJvOI+iQO6ZPfCqTL4oGUnZ5fMALE7OJCo0AgCLBd56Cy6TTsXLVX5dx2g0pM+LxL1kZhIFPId3863MjU9lfuJcyltryNI9Q/f1+4j+1U5YudLj3y3xT45dPo/VbiMtOokTb6bShI2O8FRil3koN9E46HU6FkfOoaH6FI3vFvHgskHuO+XiOCxrHUlcKKsTysvq9MVD28qguxvi42FZXqhLDgn/q2M0GlJ5kbiXvXuxPfQgept9KNbHXXWMJkn+onWUv13Dg+wh+ujbYsYqlRfJGJTWnQNg7dyl/P0Jhcuk8/dfVfGpB91fx2giLI5M57c/3A68wOd4TNY6koxJqUN5yXVRXrTM0LfeGjBuLsMIwCZJvIn6wAN88dGNo384k+nNq6u5uy+cxXVtfLimVGx77jk4dozY8nK/Ts4k8Qza7HVJ4mKOiVQv3PxBz9Qxmgg5ESn8+6ffJ2sdScbF3NeFqbUOgNVzhpSX114T7+9/vxeEmgGk5UXiVqrbG+gYbAcYSmuu04F9htObZ2aSAexjaPlKbWrGuG4dtwF885tyxipxMmAd5HRjBQC2hsXY7ZCd7V23AKPOQOfH72VzYgT7flY8cgc/D3WVuIdjly8AkJ0wh4SIGACsVnjzTfF5oCov0vIicStvV52kLSqMpvAoWuZ50bt9nHIBdr0e6wwuX0l8nzONJiw2KwkRsZw9nAYIc7u3uTlrlfNvu2O4tsthW+JCmWO509Xf5fhx6OqC2NjAXSmXd4HErbxZfpqmuAhyr/8t/a970bt982bxvaNwaOdO1AcemDlZJD7PMUe0Ru6cRbxxSKi7vqC83DB3OW1RYbREh9G5aBWFPM5xfR5qiv+HukrcQ9ko/i7aktEtt4Be7wWhZgCPKy9ms5ndu3eTn58/of1LSkrYtGkTu3fvpqSkhK1bt1JUVORhKSXuwK7anTdSrLqSufO84yswQi6HGLJcgGQstH67LHkx770ntvmC8pISnUDM/EXc+Z17eKvo5zwXU0ie7TClf6jy+1BXyfTpHezn3JVKAHLnjHTWve02Lwg1Q3hUeSkrK2P//v2YzWba2tomdIzZbKakpITCwkIKCwvJyclh48YxHEAlPkVFSx199m7sg6Gsz83ytjjOcgGNC7P43sfzOBU/l57oFAZiY70tmcSHsNntHHf4DejblmCxwJw5wufFF7gpcwUWg553a0+yfj2AwsGX/T/UVTJ9TjZcwqbaSY1OZHbsLABsNnjjDfF5oPq7gIeVl9zcXAoKCsie5ChQWVmJqqpUVFRQUFDgIekk7uZorSgD0FO1kPW3+4AvuKNcQOXBP1B043zu/+KHuHe1if6kJG9LJvEhLrXU0D3YR4QxjIqjcwFhdVF8xFB3U6ZwWnin+iR33in8tl580ZsSSXyF0ZaMTp6Ejg64NeIoud8K3IzM0udF4jbeKheOYz2mJdx8s5eF0QgNJS/jOkJ0RkLi23nX1Exfnw8oVhKf4ZjjAbByzkLePKQnj6P8uNR3Bv3c9MWEGUJo7m5n8Q21gHDnmqAxWxLADCkvS5zb3n1XvH89cQ+61wI3I7NPjuL79+8nISGBtrY2Kioq2LFjx5j7DgwMMDAw4Py/s7MTAIvFgsVicatc2vncfV5fYTrtU1XVmeQrhSVERVnwlZ9Jj8LauUt5q+oE4fNPcOJECh/5iI8I5wFkP50cpbWi3y5LXsCed1R+xB7mXHwV2+9+h91LoRqubTQajeTOWczb1Se51FPG0qUZnD2r8Pe/W/nkJ/0z3D/Q+yh4vo02u53TjeUALEvOFt9TXU39X9tZjZ71rc8BoD77LNbNm0VqiMREt2Vk9kT7JnMun1Nech15C7Slpt27d7Np0yYOHDgw6v7bt2/nscceG7H9pZdeIiIiwiMyFhePknMhgJhK+5oHO+m1d2K3GMkI03Pw4EEPSDZ14npEV49efIyysocC/hqC7KcTQVVV3q08SVp7D4MHLrK47xifUp4DFaxPP807WVmgqgzGxNDnhegerY2xjv7719JXWbjwOs6eXcBvflNPVNSxGZfJnQR6HwXPtfHKgJk+ywAhioHzh09wUTnFR++9l+8B3wPUXseOzSK/lcafn3/erXK4s329vb3X3smBoqqez9RVVFTE9u3bKS0tnfSxZrOZ+Ph42tvbiYuLG/H5aJaXjIwMWlpaiImJmY7YI7BYLBQXF5Ofn4/RaHTruX2B6bTvD6deYfsrv6W7fCnff/932LjRt2aEdR1N3PvUN1BtOhp//nMqL0YQEhJ41xBkP50MWr848a19zm0qCgoqqqKguAyPlsHBaX3XZLi6jVVt9Wx8eitGvYH/WPw4H7kritRUleo/HMbw3W3Yt29HdS0Z4OMEeh8Fz7fxz2de53slT5I7ZzG7N34XgN4nniHiSw9jxDpif9VgwPbkk25LE+GJ9nV2dpKUlERHR8c1n98+Z3kpKioaFl2kKSwmk8lplXElNDSU0FHCcI1Go8duCk+e2xeYSvsOV11gaW0bX9q/nyUfuwujcY2HpJsaWUlzmBuXRo25gYHECi5dymXVqsC9hiD76UQ43yLKRPzyCx+hcNdBDKrVmcxQuaomlzd+S62N85PnkhadRENXC5HZl4iMXE1jo0LHL55h1muvoXv2WbjhhhmXb7oEeh8Fz7XxXHMVACtmL3Ce/53sh/gOyyljpCKrHD6MwQMZmd3Zvsmcx6ccds1mM5s2bcJkMg3bBkw6Ykkyc6iqypGac3y4tIqb28+QeNA3HcRuzVkFwM3JzxP3iQ0+45Ap8R5nGkRJgI5PfJSPpoye1HBGa3KNgaIo3JC5HICKY6/zTytLWU0ZkX91qTRdVgalpbJuV5BwxlHOYllqjnOb5qwLDFVjDMSqjMyQ8jJWjheTycTOnTud/8fFxbFly5Zhisru3bvZuHHjqEtGEh+guporr7/EnKoK7jheI7b56EB6c9ZqAO7vfIFM02sB64UvmTiaw2NGxHwaGsU21UcH/bUZ1wHwyP1f5+dvr6GMPMK7m8WHWqXpNWsgM9N7QkpmhH7LIJeaxXi7LG2+c/vhw9BEMj0xqaI/eKs8ywzg0WUjk8lEUVER+/bto6ysjK1bt7J27VrnslBJSQm7du1iy5YtzmO2bds2TKFpbW0d01lX4gNkZpKKKIDoLL2oDaQavlAAsbqaNVf6WF7fxYfO
XQLA/uxz6B56SMiXlOQ2L3yJf2C125zZSQcu59CESrMhlVmrM+Dzn4df/xpqa31m0F+TsRSA7zxwA9/fdxSdbWiJi6uWuCSBzYXmKqx2GwkRsaRGJwKi9u3hw9BOOhderCL3hhCRrKigAAYHvZrl3BN4VHnJzs5my5Ytw5QTVwoKCkYkodOsLxI/Ye9ebA89iN7mUi7OFwfSzEyMwF6GqkwrvqhkSWYMU2sd/dZBokLCKS9L4zI6/u3TVTz+G98c9FOiE5gbn8rfVsO9G7/M9R//9MidZKXpoOB0gyNEOjUHxZFN8dIlaG+HsDBYvibUpSKtd8uzeArfsotK/I/Nm/naN0YZRMEnfAWcjFNlGoNBfC4JKk47/F2Wpmbz3mExFObdFDqUWtcHB/21DuvL+SbhF2jDN5e4JJ7ldKO4/svShvxdtDq0eXkQ4D7QgFReJNOkZ7CP5n7hLGDX1AJfHEjHqTLtU0qWZMbQ/F2uS5nPkSNim68H7Gh+L+/0NdATnUopefxsSeD6NUhGx5mcbhRnXV/vw+7C50KlJf7FmUYTbdGhNEdGYE+8jpTv+J6vwNXYFdCposq0HrlUFKxos9dYSw7d3RAVBUuXelmoa6D5vbxjbedM8WnW3ZBAeJXCF9sLCMF3lrgknqOzv5uadjFhvC6IlRcfnCJL/ImT9eU0xUVw00ceo+4Ph6GwUFgyqqpEYURfIjkZNSUF09wUvvfxPE5G59AXK2erwUifZYByR7RGR6V4AKxZA3q9N6W6NrOi4slMmI2KSndCJbNmKfT1weH3fG+JS+IZzjiU7oy4FOLCowHo6xMFGQFckukGNFJ5kUyLdy6JyJ3uK0tYsdJ3fQUASE/HWl7Ob/7zUYpunM8nPvUgX/lwle8pWRKPc76pCptqZ1ZkPGeOJAD+M+hr1pejtWe4/Xax7ZVXvCiQZEY5PUp+lxMnwGaDlJTgGc6k8iKZMqqqcrZZKC+ZkfP9w0ksNJTsyFQAIrMu8tKbigwyCkK0BF/XpWbz3mGhdPuL8rLWqbyclcpLEDLUd4eUl7Iy8Z6XN+RvHuhI5UUyZRo6W+i1d6Da9NywMMvb4kyYJGMMiRFx6IwW2vSXKC/3tkSSmUYLNV2YMJ/Tp8U2f1FeNMvLheZq1ryvCxD+Dn193pRKMlOcv1IFwJKUoTFXKxsYTFHyUnmRTJmTDcLq0lc/j5vWhXhZmomjKArXOx4AUfNPy1lrEKKZ3kO6crDbYc4cmD3by0JNkKTIOLIShLAdIRdISxPpaLSIKUngYu7roqGrBYBFyZnO7ZrlRSovEskEKKsRyktv9QK/83DXQk6j5p/m5Ze9LIxkRuns76HWfAUAc4UoReJHBZkBWDVnEQDH6y9w881i2xtveFEgyYxwoVmUW0mPTSY6NAKA/n6c1kN/68fTQSovkilz2CRM76FdC/zOSUzzG4jIMPHam/3Y7dc4QBIwXGiqAmB2TBKny6IA/5uxrnYqLxe55Rax7c03vSiQZEbQlowWu1hdTp8GqxU+GHuUjM/eHjQFZ6XyIpkSg1YLNd1VgEjy5W9OYmkxSaREJaLobfRFXuLUKW9LJJkpzjuUl0XJWX5rbtcsL2caK7j+xkEA3n4bbIePwu3B8wALNob6bqZzm9aHvxy7B+XVV4Om4KxUXiRT4mJzDXYsWHuiuXlVirfFmRJ5GYsBiMw6z+uve1kYyYyhPQBy4jM5exbyOEr+j/zrgT83LpX48BgsNiuG5Eqio6GzE9r+dw8E0QMs2NCshk7LS3U1zS+Wspoy1rfsE9uee05oNKWlUF3tFTlnAqm8SKbE2SvC4bG3Npsbb/Azs4uD1XOE8hKRdZ7Lf5Yz1mDhQpMY0EN7MrHb4Qvhewh7278e+IqisDpdWF8qjr3Bg9eJB1jkC8H1AAsm+i2DVLXVA7A4JVNszMzku39aQxl5RPY2i21awdk1ayAz0yuyzgRSeZFMiSOmSgAG6rP91kksN91heZlXzoJ3n5Iz1iBgwDqIqbWOtPYeQl8zs5oyNtr884G/arZQXj557yP84l3xAAvvDq4HWDBxqaUGm2onPjyGWZHxAFif2osFreCsI2GVGhwFZ2VtI8mUOFknlJdZ+iwiI70szBTJ7ray5ko/3ZY2PtL/N7HxuefgoYfEAJCUBPPmeVdIiVspb6nFptp58YcvAC/wEKAOOiyH2gNfw8ezF2pOu9976Da+u/dNdDbr6A+wp57yjoASt+JcMkrJRHE4GZ5euZl/YglljDKDPHzY/5y5JoFUXiSTZsA6yJWBWpbWtfGvZd+Ao/8jZnh+hi4rm187/nYGG/nZA0wyOTR/l19/ZSMP/d/zGPDfB/6SlCxCDUaKlqXw6b/9law7PzRypwB/gAUT56/2d2EoOR0AOh3Y7UPvAY5cNpJMmkvNtaiKjbvfvcx19W/671LL3r3YHZX4nDdCkJhcgxUt1LTt3nu5yXB49J0OH4bNm2dOqCli1BtYljofgEpzFQA2rSfr5NAeaIymvJSVQRPJdEakiknX44+L99TALzgre7hkclRX0/Dqiyyua+POkw6/AD/zFXCyeTMVfysa/TM/eYBJJof2AIjoz8RqFdtUnf8+8FfNWQhA6WAbnZGplJLH0+8LngdYsGCz27nkqIJ+tfJymXT+/qsqMWYVFor3qqqAr9Aol40kkyMzk3wgn8BYasl0pFm3K6BTQVV0KGrgm1yDEZvdzkXHA6CnJpMmVNpCUklYmQGf/zz8+tdQW+tXD3wRMfdnDg02cd3TVdz98RAWtyp85myBqBngi9XdJZOmur2Bfusg4cZQ5saLwrI2m6gmDbDy+lDQgj4VJSiuu/9NNSTeZe9erI4ZaiAstRjTZtMRG8XZOfE8mv1V2rLkjDVQEQ+AAcIMoZhOpHGZdH7y5Sq/nrGunL0AgJr2RhauHQAUzp+HtvbgeIAFC+ebRIDEolnz0Cli5K2oEMU4w8NhwQJvSucdpPIimRSD932ST395w+gf+uNSS3o6z/1pF5u/mk/RDTn8+x3+9wCTTIwLzuykczlWJoY+MWN1TFn9cMYaExbltB7WD5Q7H2LvvedFoSRuR8tN5JpZV7O6LFsGDte9oEIqL5JJUd4inHVBLLEAfukr4MrK7OWgKETOu8Qbb/rfA0wyMTR/l4VJWZw8KbatXu09edzFcofT7qmGcmeB1Hff9aJAErej+bssmDXXuc25ZLTSGxJ5H/9+6khmnDNXTLRFhdEUHkXP4sDwbl+WmoOCQkhCM+erzLS1eVsiiSfQlJd4NZP+foiIgPnzvSuTO1gx26G8NA4pL++840WBJG7nYotQXhZK5cWJVF4kk+JoRSVNcRGse/8v0R/1X18BV6JCI5iflAFA+LyLHB4jglbi32jOuoNXROLB5cv93mgIwPI0sVZ0uqGc69cJZ/PDh4Mi1UdQYO7rorm7HcA5ToFUXgLg1pXMJCcvC8exGONCwiP811fgarTZa8TcS3LWGoC09php6+1ApyhcuSCU7BUrvCyUm5iflEGowUjXQC+xGQ2Eh0NHB1y44G3JJO5AWzKaE5t
MZEg4AG1tIjAOhBIejEjlRTJhLDYrjf3iRlo+O9vL0riXFWkiX0bEPKm8BCKa1SUjLpXTJ4SiHSgzVqPewJIUcT+eaypn7VqxXfbjwOBSi9BSFrhYXTSfrXnzIC7OC0L5AFJ5kUwYU+tl7IoVa28kNy6f5W1x3IoWchqRbuLwESs2m5cFkrgVTXlZOGuuc+APFMsLSKfdQKa8RTrrjoZUXiQTRnN47K+fx5o1yvg7+xnzEtKICY1EFzKINbqas2e9LZHEnWim94youQFpbl+eNuS0e+ONYptUXgIDTfFekDSkvGgKuFReJJIJUFZZBcBA47yAmrUC6BSd8wEg/V4CDy1aQ98lnHXnzg0sc/sKh+XwUnMNq/IGATh9Gjo7vSmVZLrYVTvljmUjGWk0HI8rL2azmd27d5Ofnz/hY3bu3Mnu3bvZvXs3O3fu9KB0kslwolYkSopnHuHhXhbGA6yYLf1eAhGr3YaptQ6AzmrhNxBog35qdCJJkXFY7TbadZXMmycSX5/fexRuvx2OHvW2iJIpcLmjiT7LACF6IxmOsgBWq1BMIfD68WTwqPJSVlbG/v37MZvNtE0weYamrBQUFFBQUEBubi6FhYWeFFMyAVRV5XKPI8vjrEzvCuMhnH4v8y7R9aoc9AOF6rYGLDYrEcYwTKeEr1agWQ4VRRlaOnLxe1Ge3gOvvuq/ld+DnEvNwuqSk5SOQSfS6F68CAMDEBkJ2YEVNzEpPKq85ObmUlBQQPYkfuHt27dTUFDg/H/9+vXs3r3bE+JJJkFjVyuDSg92q57rF/lnPpdrsSxtPgoKoYlN3Fb/hBz0A4SLTofHDE6dFENeoCkvMOT3UnviMB9NL2U1ZSwo2yc+9NfK70HOkL/LyPwugZKnaKr4VNNNJhNms5m4URajS0pKZl4giROtLsxAUzp5qwOzGHl0YzPruwwsrmvjk/YDYqMc9P2eS83ius1PnOc0twei8rLMobx89+Hvcf9P11BGHjGDzeJDrfL7mjWQmek9ISWTYijSaJ5zm6a8fHh2cFuHfeopZDKZRt0eFxeH2Wwe9bOBgQEGBgac/3c6PNQsFgsWi8Wt8mnnc/d5fYXx2nesWlybvvp5LF1qwV9/gvHaaMzM5CeOv7XkpGpzM0pe3tDxg4MelnD6BHM/HY0LV4TyEmOfQ28v3BRyhAVf2IL1R9tRXa6tLzGVa7gwQczOt92/jh8eKEWxWtHhqPjuqPyuGgzYnnwS1ct9I9D7KLinjVpBxqy4NOd5TpzQAzrubvkdHHoV2+9+h90Lzi+euIaTOZdPKS9jkZCQMKbPzPbt23nsscdGbH/ppZeIiIjwiDzFxcUeOa+vMFr7Si4cBx3oOmbz3nsHZ14oNzNaG9O//nVW/exn6O12p0lScQz6dr2eY1/9KnUH/aftwdhPR+NU3UUAzrzVB0Bh+JPoX3+Nih/8gNMPP+wx+dzBZK9hojGag7mZLLnudh781+0jPn99xw464uLAR/pxoPdRmHobB+1Was1XAKg+cZG2M7WENzVheXcpqwlnfunvAbA+/TTvZGWBqjIYE0PfDNeYc+c17O3tnfC+fqG8jOfsu23bNh599FHn/52dnWRkZLBhwwZiYmLcKofFYqG4uJj8/HyMRqNbz+0LjNe+H196GYD5CTncdddib4jnFsa9hnfdRdUd7yfnzntHHGd7+21WrF6NP6w2BHM/vZrO/h7+fdezpLX3sKwxkdWUce/A8wBkHz7M3H/7N2GVSEwU6Up9hKlew7f+XsU/Lr5DRFocADZ06LGj6nQodjvvu/lmnyilHeh9FKbfxrNXTKgVB4gPj+aTH/44iqJgDAlhg+NztVfk2grp7OS2b3xj6HtnyDrsiWvYOYnYfp9SXsZy7DWbzWN+FhoaSugodXWMRqPHbgpPntsXuLp9XQO9dNHE0to2vvfWlzGe+KlYO/djxrqG8xJnA2BXQKeCquhQVDtGgwH87JoHWz8djeorDQC8+MMXgBf4GqD2i0FfaWnBuG7d0M4OK5svMdlruGz2fP5x8R1O2bu4KyaV050ZvL348/xL9K+hthbj7Nk+1Y8DvY/C1NtYZRZ9d8GsuYSEhABQ8dhe5v7HZzFidVqFtXcMBnjqqRn/Pd15DSdzHp9y2M3OziYuLm5U35f169d7QSIJDDk83vV2I/OrDwV0BI4hNQ1zbCRn58Tz9Yxv0JqZB6mpMMOmWIl70KI1fve1T2FxzNWUq/xAMBhg715viOd2rnPUOHrH1sqFf1SxjsM81liI+q5/V34PRipaRG6inMShSKNDGZtZxxhl7w8fhs2bZ0I0n2BGlJexln1MJtOIJHTbtm0bFllUVFQ0LHRaMsNUV9PwSgmL69r40GmHUhnIETjp6fxm3/+y+av57M9dxn9+SA76/oymvDTd85GgGPQXp2SioHClq420xX2EhiqYzVBe4f+V34MNU+tlQOR40ThzxmUHLU46SOOlPdpqTTnZtWsXZWVlbN26laKiIufnJSUl7Nq1a9gxW7ZswWw2U1RURFFREUeOHBmxj2QGyczk7oe+yr6fFZPQ3yO2BXjY5eK5i0FRCM8o570jctD3ZyocqdWN3UOz10Ae9CNDwslKEEufl9oqWbVKbD9yxHsySaZGRasjQV3iHOe2s2ehiWR6Y1LFGPz44+I9CK3DHvV5yc7OZsuWLWzZsmXUz7Usulfjuv/GjRs9Jp9kAuzdi/XBBzG4ROBw1RproLHMUaE3fHY1J05bGRgwSP3FD1FVlQpHWYCuugya0NMWkkrCygz4/Ofh18IPJNAG/aWp2ZjaLnOm0cTatas5fFikAnngAW9LJpkovYP91He2AJCdONzycpl0yv5Yxc23h4CiQEEBDA4G3SQr8KYeErdivf9TfObLd4z+YQCZ213JiEshJiwSndGCPqHWWcFV4l80dbfTNdCLTlGoO5vGZdL56VeqRL8tLBTvAbgkuDQlCxDRKmvXim3S8uJfaEtGiRGxxIVHA9DVBTViFZSlq0OF4gLiPcgUF5DKi+Qa1LY3YlesgIi8AQLS3O6KoigsdTg+hmeUy4HfT9HM7hlxqZw+IaI1luUF/qC/NDUHgLONQ8pLWZko6CfxD7RCoq7+LmfPivfUVEhI8IZUvkVgP4Uk0+Zicy1tUWE0R0TSuyR41liXOR4AERkVvPeel4WRTImhaI10p/UsEMsCXM2iWfPQKQrNPe3Ez24jKgp6e+HcOW9LJpko2nKn65KRprwsXeoNiXwPqbxIxuVYVQ1NcRHc+KHt6I4EtrndFa1OTHiGSVpe/BTtAZCoT6e7G0JCYOFCLws1A0SEhDkfeuebTWgVEGQ/9h+0vuvqrKtFGl13nTck8j2k8iIZl5M1wvQequYQHhHY5nZXrksVy0ZhKbWcL++nq8vLAkkmjWZ5oUNEGi1Z4lP52TyKtux5xmXpqLTUiwJJJsVoOV6k8jIcqbxIxqWmUygvmXEZ19gzsEiOSmBWVDyKTiVsTqUc+P0MVVWdfgNdtcIKEUyDvua0e+5KpbMawLFjXhRIMmFEpJGoBp4tLS9jIp
UXyZj0DvbTpYrCYCvnzfWyNDOP0+8l3ST9XvyMpu42ugf70Cs6as6kAcE16C92KC/nm6vIzRXbTpwAm82LQkkmRGWbiDRKiIglPkLU5+vsFFH9IH1eNKTyIhkTU+tlUFQsXbGsXR7rbXFmnCWOB0D4nCrpL+BnlDvM7nPjUzl3WqwVBZPysnDWXBQUmrvbSZzTQUSEcNq9eNHbkkmuhRYm7Wp10ZytZaTREFJ5kYzJhSaRVKC/MYPly70sjBdYnJwJQPicSvrfPAq33y6yfUl8Hme0RkI658+LbcuWeVGgGSYyJJyM+BQALrVWsXKl2F79B9mPfZ3yFi2z7siyAMGkgF8LqbxIxuRouVBebC0ZjFHUO6DRLC+hyZfZ0PRbePXVgC5KGUhoZQHidRkMDEB4OGRleVmoGUZTvs9fGVo6ivzjHtmPfRxnTSOpvIyLVF4kY3K2QTwAkgwZgZ6XblRmtXRyQ8sgS+pbuY99YmMgF6UMIDTLCx3iAbBkScDnVhyBprxcOXOM9fGlrKaMZadlP/Z1KsZJUCf9XYbwaG0jiX/T0FsLOpifFHzOugBKVhZaSVC7tlErSqmh1XmS+Awi0kjMXjtrgi/SSENTXrZ9/jHgMe4F7BZHugPZj32S3sF+6ju0SCNpeRmPIJuLSCZKa08HA7oOVLvCmvmBm4xuXPbuxa7XA4xelHLvXq+IJRmfxq5Wegb7MOj01AZhpJGGprx85/4bUA1inqrD0X9lP/ZJqtrqUVGJD48hYZRIo2Dsx2MhlRfJqJS3CH+XwbZkcleEeVkaL7F5M0cO/Gb0zwK0KGUgoPm7zI1P4+xp8dAOxkE/MTKOpMg4/pY7j0sv7B99J9mPfYrRMutqS0ZpaRAf7w2pfBOpvEhG5Uy9eAD0N84NykgjjcyE2QDYHdZ2NdgcJ/yQ0SKNglF5gSHrS1VrPQA2bciX/dgnGfJ3GUoKqikvwdqHx0L2YMmolFYI5UXfmRHI9RevSXL2Ilqjwzk7J54vx36f7oWBX5TS39FyvMSRjsUCkZEwb56XhfISi5JFw8+o3fREp1JKHr9YFhzFVf2R0XK8aP4udyTKMHdXpPIiGZVLzWLZaHZ4cDrraigZGfzrr77F5q/m88zCG/lNYeAXpfR3tNmrak4nj6O8qrsdXVlwDviLk0V8+BF7Byf/UsU6DrO9LTiKq/ojptahSugamvJyZ5MMc3dFKi+SEdhVOy0WcRMtmR1cNY1GY8GcBaAohKdXUXYs8ItS+jN21e58AHRUZ/Age1jbFbwDvmZ5udRSy5JcA4qiUF8PV5pkP/Y1+iwD1JmbAJdIo+pqdMdEmPvC4zLM3RUZKi0ZQX1HM4suN/AvL5yk4YE6YM41jwlknGUCZldS9rqXhZGMS0NnC3FNbeT0WtGfuTw8P89DD4kom6SkoFlHyohLIcIYRq+ln+aBehYtyuD8eVGk8c47vS2dxJWhSKNoEiMd5VgyMzno+Fw1yzB3V6TlRTKC8tZaPlxaxTrTFW689Iy3xfE6Q8pLNWfP2ent9bJAkjGpaKnjxR++wO//90V2Fq9jFiJnhnPAX7MGMjO9KuNMolN0LHRYXy40VzkrTJeVeVEoyag4Hc1dlowu/edeLA4bg6LKMHdXpPIiGaK6mtjycupLDnHHCeHzkvyKNFHOjU8lwhiGLmQQY2I9p055WyLJWFS01rHt/nXY9GJok3lNhpcJ0JSXY8e8J49kdEbzdzmUvpl1HB79gCAPc5fLRhInxgULuM3xt2aIVFqkiVKbvR6/fIHw9ErKytJZt87bUklGo6KljoO5mWTnfYJHvrll5A6HD+Ms9BMkaMrLheYqPuVoulRefI+KFs3yMjLSCBDh7Xb70HuQIy0vEifWp55yZpRVtI1BPGN1ZYkjaiN8TqU0ufswmund3iUqKttlXpMhy0tTFatWifu5ogI6OrwolGQEzoKMLjlezpyBJpLpjUkVk8jHZZi7RvDe0ZIRqA88wKs/+tHoHwa5iXJJSiYglRdfRkQaiQdAQ+dSGkilOkkO+DmJ6Rh0ejr7exgwtDh9lY8f96pYEhf6LYPUmq8AI7PrXiad489XiTG4UIa5a0jlRTKMdms3MJRRNphnrK4MOe1Wceq0ncFBLwskGUF9RzP91gGMegMnq1eTSRXPb5MDfojB6FyKuNBcLZ12fZCqdhFpFBceTUKEiDTq6IA6R3H0patDQXEMyooMcwepvEiuojoUWqLDOJU0m8b/DO4ZqytZCXMI0RvRh/ehRDc5U3ZLfIdyx5JRVsJszp7RM0go1y2TAz64Ou1WOl1+pN+L7+Dq76I4lBRtjJk9G+LivCSYDyOVF8kwTuiM3Pmde/jILf/FrH8N7hmrK0a9gQWzRLbh8DlVctbqg2gPgMz4dMrLxTZZD0awyOm0W82qVWLbiRNeE0dyFeNl1pV9eHSk8iIZxuWeLiwGPQn6eej1BP2M1RXn0tEck1RefJCKVlGPK8aegc0GsbFi1ioZHi69cqXYdu4ccvnTRxgtx4ssyDg+UnmRDMNMKzDc410iWDRLeDqGza6WyosPolle7G3iAXDddUNuAsHOQofVsKGrhbjkXmJjwWLBWXVb4l2GqkmPtLwsXeoNiXyfGcnzsnPnTuIci3Zms5ktW0bJv+BCSUkJu3btIj8/n+zsbIqLi1m7di0bN26cAWmDl87+HqwhXQDkZUvl5Wq0OjHhaTUcfwasVhFBLvE+NrudyjYRadRWKR4Ay5Z5UyLfIiYsipToBK50tVHeUsOKFYt54w04eRJWrPC2dMHNgHWQOmekkVw2miget7zs3LkTgIKCAgoKCsjNzaWwsHDcY8xmMyUlJRQWFlJYWEhOTo5UXGaAcofZfbA9ibwVEV6WxvdYkDQXBQVjbDvL1dcYeJ8sT+8rXO5oYsBqIdRgpOqUyPEiB/3hLExyFGlsrnEqLCdPIvrw7bIve4uqtgbsqkpsWBSJjkgjsxkuC11cWl7GwOPzxu3bt1NZWen8f/369eTn57Nr165xj6usrHRaayQzw/kGYbrsb8hg+XIvC+ODRISEkRGXQo25kc+GPkHke45qxWvWeFu0oEczu2cmzOHYGTEnk8rLcBbMmssblce41FLj9Hs5cQLo3wOvyr7sLTRfLddIo3PnxGdz5shIo7HwqOXFZDJhNptHVUJKSko8+dWSKVBaIW4ipSOD1FQvC+OLVFdze4fC4ro2Nvb+TWyT5el9gooW0Xcz49KpqBDbpPIynAWzxFLwpeYa1syqJpdS1NIy2OdSeVv25RlHS6yYPcqSkbS6jI1HLS8mk2nU7XFxcZjN5nGP3b9/PwkJCbS1tVFRUcGOHTtG3W9gYICBgQHn/52dnQBYLBYsFsvUBB8D7XzuPq+vcKFZPABSQtOxWgOzjdO5hsbMTL7u+FurLKI2N6O41H6y+ED4RqD309Had6lZFBKNGExDVSEhQSUhwYq//gSeuIZZcSL06lJLDau/lkkpQCuoioLCzPblQO+jMPE2an03Kz7Nue+pUzpAz5IlNiwW36xj5IlrOJlzecXdUFNKxiLXk
UUpOzsbgN27d7Np0yYOHDgwYt/t27fz2GOPjdj+0ksvERHhGb+N4uJij5zXm6iqypWBWjBAtM3AwYMHvS2SR5nKNUz/+tdZ9bOfobc7K+Y4y9Tb9XqOffWr1PnQ7xaI/dQV1/YdrxZ29vJSKwCpqa38/e9veUUud+LOa2hT7ejR0TPYz6GvfpEbf/4ERqzOPuyNvhzofRSu3cZTNRcBaC6/zMF68Zu//vqNQDI220kOHqzxtIjTwp3XsLe3d8L7KqrquTLBJSUl5Ofnc/VXxMfHs2PHDgoKCiZ0HrPZTHx8PO3t7SOWoEazvGRkZNDS0kJMTMy02+CKxWKhuLiY/Px8jEajW8/tbRo7W7jnt19Htel5OHI3XywM8bZIHmG617DljVdI++CdI897+DDOvOteJpD7KYxsn81u55b/9zCDNgu3tP43v/zRbAoKbPziF745Y50InrqG9//+O1xqqeWnH/46L35Rz09eXTvyu2egLwd6H4WJtXHAOsgt/+9h7KrKiw//H0mRcQBkZRm4fFnh0CErN9zgsUf0tPDENezs7CQpKYmOjo5rPr89annRLCdXYzabx/wMoKioaFh0kaawmEwmp1VGIzQ0lNBRkqgZjUaP3RSePLe3qDQ3ANDfNJvcj4ViNAZ2DPBUr2FqbBIgaj/pVFAVHYpqx2gwgI/1iUDsp65o7bvcVs+gzUKYIYSq08JZa8UKPUaj3ssSTh93X8OFyfO41FJLZXs9CxfOg1dF5W0ddlHHzD6zfTnQ+yiM38ZKcz12VSU6NJLU2CQURRkWabR8ucHXhpURuPMaTuY8HnXYzc7OJi4ublTfl/Xr1496jNlsZtOmTcOO0fxjxlN4JNPjRLXwdxlozGDpUt/U9H0BJSUFc2wUZ+fE8/WMb3AlXdZ+8jZapFFW4hzOykijcVmQJJLVXWyuZu6aZBpI5Wy4rLztLbTEijlJ6SNqGslIo/HxeJ6Xbdu2DYssKioqGrZcZDKZnLlgQFhZtmzZMkxR2b17Nxs3bpSh0x7kWJVYV9V3pxId7WVhfJn0dHY98xM2fzWffStW8e0PyNpP3kZ7AMyLTUfLyiCVl9HRMu1eaq5h4e3pZFJFnvUw1s/LOmbewJlZ11H1G2Ryuoni8bWBLVu2sHPnToqKigA4cuTIsBwvWjZd16y727ZtG6bQtLa2juqsK3Efle3C8pKgxHtZEt9nfvp8OPsqYbOrOXFE1n7yNtoDINIqHrqzZomXZCRacdHq9gbS0gcJiQqluxsuXoSlS2VfnmkqZEHGKTMjjg2uisnVmXK1zLuuaNYXycxgsVlpt18GBTKipNnlWizUahyl1XD2rKgR4+vr0oGMZnmxtQzVNJKMzqzIeGLDoujo76aq/TIrVmTx9tsiWZ3MKTLzjJbj5exZyOMo3/z7Fnhgp0wcOAayMKOE6vYGVMWGrT+cnDTZJa7F/KQMdIqCMboDe6iZCxe8LVHwYrXbqGqvB6DVlEEeR3n8kkx1PxaKojitLxebq4eXCZDMKINWC7XtjcDIgowPsofZ5x1ZjyWjIp9UEi421bK0to0nHn+V6xU56F+LcGMoc+PTAAhLqxYp1iVeoba9EYvNSpghFNOpJB5kD4suy0F/PIb8Xmql8jJR+oCDwDeBDwLzgSTHawGwHvg2UAQ0TeyU1e0N2FQ70aERzIqMh+pqul4rJaW+jPuQWY+vRWDHw0omxNGKGj5cWsWNl6uZdebvwCPeFsnnWZg0l6q2esLTajhxYiWbN3tbouCkorWOtPYe8kLDOV52fPig/9BDoKqQlATz5nlXUB9Cizi61FLDg1J5GZ8q4MfAXqBzjH1agXLgZcf/OuADoGxUMESP/Ygtd/F3URQFMjOJBsoAOyLyiOZmEQWm4bm0bH6HVF6CmepqaGmh5+13uOOEiDbKeOsNOHYM9Ho56I/DwuR5vHTxXeG0Ky0vXqO8pY4Xf/iC47+n5KA/AYaWjWpYfofYVlcHbW2QkOBFwXyJZuAHwK8ArUpCOnAXcAPC2qLFNrQBF4AjwGHgBPAyGF42cEfYHSjvKPB1IGf4V5gcvlpOf5e9e7E/+Fl0dis6HP1V67cGAzz1lJsb6d9I5SWYycwEYAdotwohHR0o69YN7SMH/VHRTO9hadWceMXLwgQxFa21bLt/Hd/ffxS9zSYH/QmQk5iOgkJbbwcWvZmsrDgqK4X15bbbvC2dl1GBPcBXGbK0fBCxJHQ7Yzta3AI87PjbBOwHdY+K4ZwBfgn8P+A+4DFgodjNGWmk+bts3sx//20J33w2jxEcPgxXJWgNdqTPSzCzdy+qQeivjvmq8x2DAfbu9YZUfsEiLeIouZ6mFgtXrnhZoCCloqWOg7mZ7NqyZ/QdDh9GrukNJyIkjIy4FAAutUi/FyctwEbgswjFZTXwElCC8GmZ6NMyG/g2WI9befuxt7HfaRdK0XPAUuDzQBWYWjXLy1COF60iuqo4vkwnH9FjIX+ZYGbzZspfGCN/jhz0xyUlOpHo0EgUvY3QlMty6cgLWG1WqttFWYuOy+Jh7CyZKQf9cVngkqxOKi/Ae8AK4I+I9YgfIpaB8qdxTgWaVzZj+4tNOLLcA9iA38DgEgs1rY5Io8SMITGqRNbjnsUy6/G1kHd4kFPZIm4gzVdAVZTxdpc4UBSFRY4HQHhadXAP/F6ixnwFq91GhDGM09ULaSCVlnly0J8IC5LEA/OiVF7gGeBWoAFYjFBktgHuLI21Gvgr8A6wHqpjG7EpdqL6w0l+Kh6s0N4OZU0i67H9ncNQKLMej4dUXoKcwx29tESHcSo+E+svfok5Jwc1JUUO+hNgYbJj6Ug67XoFU9uQ2f1QRQaZVHFprxz0J8ICx7LnpZYaVq4U206fBpvNi0LNNHbgu8BmYAD4MEJx8WRB7RuAYqj4hchontOYjvJVBVbC5d+JXZLTQ4mJdUwiFZn1eCykw26Qc9jSzZ+/cw/Gk1/iUMH7ODRnNnetX48xKsrbovk8Q5l2qznxjpeFCUIqHNlJ58ZkUFcHEMp1yxwfykF/XDSHc1NrHXMzbYSF6enrg8pKmD/fy8LNBN3Ap4E/O/7/NvB93GttGQdT2mWoguz56ZAInIVlXxcR2X9ZMDMy+DvS8hLEqKpK40AtFoOeRamOkGg56E8YzWk3PK2Gc+dUBga8LFCQoTk8hg8I64qswjtx0uOSCTOEMmC1UN/Z6CwNcPq0d+WaEaqB9yEUlxBEdNF2ZkxxAahocVhebk4XOWK+BnZFGIF+8w7wO4ZCQCWjIpWXIKalx4xF14VqV1i7cM61D5AMIytxDgoKhqguCOvg3DlvSxRcmNqE5WWwWdY0miw6ReesZFzRWscyh8Uq4JWXN4G1wEkgBXgd+MzMizGsplEc8L/wlTVwHIjsR0Q8bQAqZ142f0EqL0HMpRaRmG6gJY3VK0K8LI3/EW4MJSNeRLmEpdVy+c9H4XZZV2cmsKo2aswiPr2lQiovU0HLL1LeUuv87c6cQfTfQOzHv0XkamlG+LUcQfigzDAWm5UaswiUmO9S0+hPdUKvqvlnIAwRor0M
+D+Ef45kGNLnJYg5WSNMl/0NGc6Zl2RyzE/KoKa9kbCUWmL/8iKUOerqyEqwHqV1sAub3UZUSDjlJxMBH1JeVIRPRdMor2bEqBsFRANzEXVyFji2zSDzHRFHFS11fMDV8rJnD7waQP14EPgW8HPH/xuBp4BI74hT3d6A1dF3k6NESuP2dmgQUf/EbQf+BZH07hAiYd5+4DeIfiIBpPIS1JSahOUlrH8uMTFgsXhZID9ktTWchro2Zoe9yXXvybo6I7AgHh4Gx8tNfgVNgx2AMLu/fEZEZnhUebEDV4BGRldKrn71T+E7liAsATcAeYicIO6mF6gB2iCnxmF5qazj2wPV/BMttJ9VUBv3icQJgdCP64FNwNuO//8D+He8uuZQ4VIWQHGkpjhzRnyWkQExMUAM8CrwOLAFsdy1AuFU/C/MqH+OryKVlyDG1CosL+lRGdfYUzIWD278Eg8CUCzr6gBcBP6AmDGeROTOcG3+LEQG0vkI0/06IBeImNzXaMpLenS6c8aqOZ1Oi3aE48Fx4BSiMF81UItQxCZDJJCMaHOyy98q0AWYET4N5Yjsruccr9+CESN3h92N7gYd3IRQaFYDc3BJgz0GdsRD+wJw/qr3mqHd5sdmwL9CTX8Did/O5NfasS2OL2jy8378OiIl/xUgFngaEQ7tZUbLrHv2rHgfpoDrgH9G1FN6BLGM9E2EFea3iGy9QYxUXoIUm91Oq60OdLBsjlRepkrTr35Owpe+hsGuBm9dnR7gSYRZ+1qJzpodr8PA7x3b9IhZ5Q2IB/VNQBbjPqSbB4TyEtYvrAdz5zpmrJOlCaFove54nRpnXz1DSsi1XrOY3LJEEyLHyLvipR5WMXQb4DXESyMCURsnFeHoGY2wbPUilKFqx2u8yLdYIAlS4hOJsoTTbeyj8bqfkXLmGyhYGdI2tXcD8JSwDN2FUABuxnefHjbgJ4gcLjZE3/oDQmH2AYZqGg2Nu5rlZVTrYSaiTMFvgEcZykXzNeA7iH4QhPhq95N4mFpzI6rOgn0wlOuXprj/C84iHgomxKA/D1iDMIcHUBLfhIJ/5rOX/s7e//77yA8DvZiaGfgZwpegzbHNgChk92HEtc5CPMRtCMtFHcLS4FqFtxE45nj9ynGeZIQSc6PjfTXDlAHN8jLYNEln3RpEv3zD8X5+lH2ygVXASkQl4HkI35TZeG7ETEakj79H/Gvtt/LGrje4NfRWDEcMQqm5iFBSjk/gfAaE7IsQWWNd34WLEAoK2b9P52TDJY4XreHItsP82/OjFAXMOQwVueK3Og/8N6Ki8oeATyIUGuOUWu1+KoGHENcXRC6XXUzasudJtPxEOS6WF015GdN6qCBqIt0BfAH4G/Bj4NfAvyEsM17y4fEWUnkJUi42C/txf2M6Kz/ixgXgEwjTZskYn89FOMx9CqHM+LkiY9DpmRMrshHbUNCjiro69gAOD7ACuxH+Ay2ObTnANxBm+oRxjk1CKAYaKkKhcVgceBsoRVginne8QPSTTOA6sC620ZbcDQroD2eQCdyU6ZBlEOEs2wFcRiz3mBAWlVOO817NMuA24P2INPG+kFxaD12ZXah3qeJhBUL5q0QoMS2INnYiIlMiEA6/cxG/UzoTUijmJ2VwsuES5S215OQIb1A7OnTYh/rxfsc5X0WkuH8BaEWk1X8GcU0fAB5ELAF64562IZSUrYjrHwX8L/BPXpJnDCw2KzWOelw5iUORRuNaXlxJR1yDvyOckM8CXwf+CyhELDMFiSFdKi9BSmmF8HcZbMpg0SI3nfRJ4MsIk7UeEZZ4HWId/SJiNlSDmLn9N8Kc+wgiM1O8m2TwAvGZObREh1FLOudzv8n9vb+G2trALLHwIkJJcazRswShxHyCqY0mCmKwzUA4VoJwdi1DKDLvOF4NiAd3JdSWXsH+qEp0XwSP7Y3nv0BYbH519clHQY+wCN3ieN2M0xLh8xgRS0YL3XdKLVy6orWOVWveRwOpNIdmsOJnn4dfu/TjBMQ1/gRCUXgXUcTwGYTl7OeO13JElMynGV+JdSfvAF9CWO5AXNM9CKufj1HT3uisx5USLTpeWxs0isjpifltKQhr1wZE1NQOhDXzR47XjcDHHZ8vJWCf8gHaLMm1OFkjLC+x6lyM7jD5/h8ipA+E6fsXCHO7K33AP4ADiIHvJPAVxAxiE0KRuRmfmilNhJTFK7nzO/fQfOp9zKst5P7DBTA4GFiZis8ilJYXHf8nImZ7Bbh/FAljyPdFo9khwxm4VCn6bnZXOs2KQqQK4XrQ2RDKSZTjNRthiZiLsK4sRyjTPrSE4G20cOnyllrm353OPKqwDITQ86BCeMEY/ViPyFD7PsSDsxihLDyPsG59DREh83GEInMbnonuOYHog390/B+LiMb5Ij4bjVPh4qyrRRppzrpz50J09CROZkD8vp9DWMP+F+EfpSn830L09dUIhVe7FxIRk8UEx3s8EI7fjbtSeQlSarpqQYGsODfYGJ9lSHHZBvyA0W+EcOBex6sdUcjjCcSA97TjtQixZv1hxIPGD26o+UkZWAx6wtLqOHUQVBSUQFFcGoH/RFjVbIjZ/1eBf2VmHQVnIZZ13g/lr9fCEUi7cTYpDp/S7g6I1AZgP+gzvoK2dFFnvkJc4iDRiaG0tsL587B69QRKhRgQvi8fQvhAPYO4p48jxoVnEUuKn0csK003kbcNsST9K4bqEimIMWMHvrHkNw6mcZx1pxwtpwc+6njVM7Tc+i4iqu0tx2s8QhlSZFwVmyiEAhSG+O0HEJPQFtCH6oespV5AKi9BSO9gP92I7KSrs+ZO72SnENo/iLXXsRSXq4lHWF2+jPCefwJ4DuHI+R3Hax6QjzCD3oiYPfjgjGqBYyAKndVAR5eV+noDc/y92kIP8FNgp+NvgI85/vdy1IZWFiDUEWmUnQ2RQeas6C6SIuOICYuks7+HqvZ6li3L5PXXRbK61ZOtrhyH8Ln4ImLZ70lERFkFQ/f09cC9oHxAQbFOUMscQFgS/uE4X61ju4LwsfpXxETHD9ByvIzmrOuWPEWzEdfgnxlari9lKOS/DuFc3+7yrikljY7XBFESFam8SGYWU+tlUFQsXbGsvT526ifqR0Qb9CK84H/M5Ge9CiLXxzqEH8w+4E/AK4ib7UnHC8TsYAHDoyeWOP72YhHslOhEokMj6BroJXRWPadOzR1deVERyx8XET4cVxADiAUxgOgRD4B4RN2VbMS6fZjn2+CkB6FI7nTICOKB8xOEj4gPoNWF6W8USqPPZNb1QxRFYX5SBmV15ylvqR2mvEz9pAi/ojxEvzmAiIp5EzFReQ8MGLgr5C501+vEpCQdSHMcb0U4IpcDlxCKUK/L+eMRPjVfRNz/fkS5ZnmZirPuZNEhxsjF4+yj5RxqZ7hCo/3dg7C09CG0hVDHaxbYEj2RRXHiSOUlCDnXWA2IwX/58mmc6N8RoZNpiBnRdK0iMQi/l0cQN82riAHvHURYbR9w2vG6mgyET8OtiOWFPGYsfFNRFHKSMjh++QJhqTWcPj2XO+9E3PxvIdrwNnA
GMShMljkI07ur0rYYYZlylyWqGpH46v8YCnvOQlTb/SQ+sxQzYB2krkNYDZsvzSGPo/zPiS1wdGdgpLL3AjmJ6ZTVnaeipc75AHVbgcZIRJHBzyKU4b8CfwH1LRWD2SDujTcncJ4UYD1iOfmjzKxC7yaGRRq5LBudPQt5HOXjv9wCy2e4HyuIcTeGkT6K10C1qHDQAzJNEKm8BCFHLtWytLaNr/zpGTIevhPmTuFmeRexrAAiRNHdERuRDMt7gQ3xgNWyhbq+mhCm5FqGbqZ44COIsOx8xGzBg8x3KC+JqbWkHHDIcYiRKd61kN85iAE5EQhBKCFWhN9AO2LtugIxK7rseB266lyhiFnr1UpN9gQEtiKW/N5ChMK+4fJZDsLh8iE8/rtNlsq2euyqSrguhEun4nmQ/yKnJoDq8HgBV6fdTzpqHGnWALeShnDwLgDrgJVDTxzi/RHvx1BvEMsZjQhrgQHhZ5GDWKJc5nj5iAI9VbSaRpEh4aReFWm0jT3Elsp+PBmk8hKEnGus4f7SKm5qvgR7n4a1k7xZ+hAzKTuinPxMpNzWIx7K2QjnQFfaEGnVjyK87Q85tv3O8UpEOAs+gsf8NeZ3CzPwnWm1fOZFlw8WIyKo3seQ13/4BE+qIvJpmBBLTRcYUt4uItaptfwlLhgx8qHoD2GYZxBr4JEu36lluNUSnmkoiKiQQoTC54O+RSAesGntPSyxqBw/eZz7kPWkpou2hFHRWsd1t4lt1dXQ2TnFrMUTQQfdGd0ij42vJLjzMEP+Lo6aRtXVVBe3sBqFB3T7xHgq+/GEkcpLMFFdDS0txFYc444TjiInrjdL7AT9X/4D8RBNRYTneZsEhkI3v4awdrwFFDleDcD/iJf+Jj0ZeRniQT0Ndx9A+Kr8AfgZLGiaC1+EitRaDiuwdifoPsH0ck0oiARgSQi/E1c0S5RmfXK1SDVBSFfI2EtsGrEIX6P1wP0IvwMfp7yllhd/+ILjv/2ynpQb0JYwLnc0ERbVz+zZYdTXi+WMG27wsnABRHmL8DTWcuuQmclqhEuPapf9eLJI5SWYyMwERISy85ZwuVmMAM8/P/453kQ44YFYLpqpRFSTQY/wfbkV4QT8D4QT6guge1tH7tu5qE+rwmpUyOQjFVoc5/slYjkHR5E7oD6hmZtC+jj74XAWeTJJlqsl6q7hH1maLbzx7BvcmnMrhhbDkMMdDClD8xBWIS9W150K5S21bLt/Hd/fdwS93R689aTcSEJEDAkRsbT1dmBqvcx11+VQXy/8XqTy4j4qrnbW3bsX22c+i161osh+PGlmRHnZuXMncXFxAJjNZrZs2eKRYyTXYO9e7J99CJ3VNrR87HKzWJ98cqwjBd0IPwgVsWz0EQ/J6U4MwN2OVwPYfm2j/xf9RF6JFM6p/4dIhvYJRHvGWlYaQCRo+x0iIZRWYTgF+CLEfSGaWX+Mp7mnnbCUOk6fXuC+zMWTJQ665nWhbgg8k3xFSx31uZmotvv50f5/GblDoNeT8hA5iem09XZQ0VrLsmU5FBe70WlXArgWZHQoL5s386WfLeHxI6PUk5L9+Jp4fN61c+dOAAoKCigoKCA3N5fCwkK3HyOZAJs386df/XT0zw4fRn3ggfGP/xbC/2IuvrFcNFnSwL7VTsmvSrC+YBV5S/SISKBvIMKwcxCKzFcd2x5GWHDiEMn1/oRQXNYgsopWI5bRUoYGpbDUGk6NV51YMiV6Bvuo72wGwHx5NiDq8ACiDo9kyrg67S7zpNNukDJotVDbLpKo5CQORRpVVIh3VZH9eLJ43PKyfft2Kisrnf+vX7+e/Px8du3a5dZjJBPjXJ0IM7WjCJP7RIsI/hV43PH3b5m+v4g30SGsEncjonqKgL8AryOUM9MYx81G1GH6DCIs+yoWJM3l3epThKXWylmrB9BmrokRsZxrWEwDqYRkZ5C45ao6PJJJ46xx1FLHZx3Ki+zD7qOqvR6baic6NILkKFHIrbUVzrUl00Aqyasz0BfIfjwZPKq8mEwmzGazc/nHlZKSEtavX++WYyQT55Stk5boMFpCslj8g69N7GY5jqgaC8Iicbvn5ZwxZiPa9FVEmPIRRA2dKwin2EiEX0kewkdknHBNbfYamlrLqdc8JXDw4hqt8UzjYjKp4uifQkhcocBYdXgkE0LruxWtdSy9U2xrbISWFhH0IpkeIyKNEA7Rl0nn/XOruHg0BBTZjyeDx5WX0YiLi8NsNrvlmIGBAQYGBpz/d3Z2AmCxWLBYLCP2nw7a+dx93pnCZrdzIbqDO79zDzd1/Tc//adU+NznnDfLaO1T3lHQb9SjdCvYb7dj224b8vfwQ8a9hpGIKKTbxjjYOv65s+JFitDwtFrOl6t0dloJn2hYtBvx9346FhebRHLFWcY59PYaMRhUsnOsOJup00GAtHmmr+G82BQAGrtasdBBZmYMVVUKJ05YufVW90e8BGofdcW1jVrfzUqY7dx+8qQO0DP/OiMWq8vg4if92BPXcDLn8kq0UUJCAm1tbdfecQLHbN++nccee2zE9pdeeomICM+Ujy0uLvbIeT1Ny2Anqs7CgDWcUEs9Bw+WARDWGkZkYyT6fj0J4QkcrjxMSGcIs9+dzdziuShWBXO2mbf+6S2sxdd4gvsJnriGg3YrCmCI6kQJ7+TJJ4+Rk9Ph9u+ZKP7aT8fi8OXjANSfFZn/UlO7KSl5xYsSeZ6ZvIbR+nC6bH3sfaGIpKS7qapKZf/+M3R3V3nsOwOtj45GcXEx79SLsXbgSicHD4pMmgcPLgeyCQ2t4ODBs16UcHq48xr29vZeeycHXlFeJqu4jHfMtm3bePTRR53/d3Z2kpGRwYYNG4hxc4Yli8VCcXEx+fn5GI3+F8ZRcvEIVEP/lXQefDiXJV0Kuv/QoSsZ30nMfq+dyN9GsiFywwxJ6jk8fQ1/99QhajuuEJ5WQ2zszdx118znafD3fjoWP3/y7wAkGt4HwNq14dx1113jHeK3eOMaHvzTad6tOUXKogw+8IFZHD0KirKMu+6aarnjsQnUPuqKaxuf+L1Qsu+5eQPXzxW5Gf73f0UmyHvuyeKuuzK9JeaU8cQ11FZOJoJHlZfs7NHzlJvN5jE/m+wxoaGhhI6yPmg0Gj12U3jy3J6kzCSSklia5rL0vBH9ZxAhwAqQA2qkSk9TD5H2SJRYBVYAXwbd+3Xo/C0hyDXw1DWcP2sutR1XCE2tpfu1AYy/3wI7vVN3x1/76Wh09HXT0mMGoOmiqIS+bJmC0RjYqapm8hrOn5XBuzWnqGpvYOVK8WA9e1aP8cQx2OKZfhxIfXQsbIpKXUcTAItSM53tPeswtqxYYcCffwJ3XsPJnMejT6Ts7Gzi4uJG9WMZy/F2KsdIJsapWpHh8TpLBvpPIRSXDyOiay6B9YiVl3/1MtZaq8jYegBR5FAyYRY4HB/DU2vJPLQHXnXUK5FMi4pW0XdnxyRx4bRYDl66VGYfdSeuTruuBRrV38l+PB2q2upRUYkNiyIxQoRpNjdDk9BnWOp+w1ZQ4PHp9LZt2ygpKXH+X1RURE
FBgfN/k8nkzOsy0WMkU6OmW5QE+MrFuaKOxn3AHxGFAiVuYdmAgSV1beTq3uN9dS51d8rKoLRUlGiQTBpnavXEDM6dE9EaUnlxL1q4dHlLLYvDq1mjlJLZXob9OdmPp4OpTVi8c5KGIo20HDrZ2RAZ6S3J/BuP21y3bNnCzp07KSoqAuDIkSPD8rWUlJSwa9euYRl0r3WMZPL0WQboUUSOl7WVGSLR3G5kgQg38/71n3Aaq2TdHfdR7gg1nWVMp7tbwWCwM99DRTaDFS1tfUuPmbDFmRxxbFdbZD+eDqarywIwlENHs3BJJs+MPLpcFZONGzcO+0zLojuZYySTp6KlDhSV+O4YErvjYB/gqYqxQYx1z+/gs5/FYFdl3R03UuGwvOi7xdLGnDldGI2eiSYMViJDwpkdk0R9ZwuVP99Bxte+i0HW3Zk2Fa0Oy4uL8qJZXrRsxpLJE1hemJIxOX1ZLBktaMjAsgr4kFfFCVgMn3mQ7/77Z0b/8PBh2Lx5ZgUKAFRVpdwxe+2uE8rL3Lld3hQpYNFS1x+5ZRVPPnx49J1kP54UQ8tGQ2UBNMuLVF6mjlRegoQj5xzKS+NcjP/BuJliJdMjPS4VAGf5S1mvZFq09nZg7utCpyhcPjsHgIwMqbx4giG/lzq04E5ZP2rqDNqtXHZEGmkO0aoql43cgeyNQUK1SZjdk1vm+kc1aD8mKXshLdFhnEpI58c5jws/gdRUWa9kimhLRulxKZw9FQLA3LkTzwchmTjOiKOWWuatFXV3junyUH8l+/FUaB4USSrjw2NIiBDr9A0NYDaDXo/3Ks8HANJdM0i4oogHgC1urlRZPczspau48zv30NmUSdNvCvnmxQIUi6xXMlW0SKP5iRm8fE5sk8tGnkHzy6horSPr4+nE66votYVQfZfC3EJZd2eyNDmUF82iBUNWlwULICzMG1IFBvIxFgS0mjrojOhAsStE3ZZ+7QMk02J+UgYWg57Q5Ho6uqzUNyhywJ8GWqRRoj6Dvj4IC1NJSenxslSBSVbiHBQU2vu66LZ2krkoFFCEg6ki+/FkcSovMtLI7UjlJQi49Cfh75LUmsKifDn4eJq0mCQijGHoDDZCk644ByvJ1NAsL0qnWNJYtEiY3CXuJ9wYyuzYWYBYOtIesFp0jGRyNA2I5c35STLSyN1I5SUIOHdCKC80ZrBkiXdlCQZ0io7sROFYGpZaK5WXaaCqKhWOSCNzlXgALFsm84t4EtelI6m8TI9mp+VFRhq5G6m8BDpmqLCIbJh9A/Ok1XeG0BwfpfIyPRq7WukZ7MOg01N9Kg2Qyoun0fwzKlqk8jIdegf7MVvF8qb2m9rtoqZRHkf50I9vh6NHvSmiXyOVl0DnRbiYWsPS2jb+3xs/kjfLDCGVF/egLRllJszm9EkDeRzl4WfWE1de7mXJAhfN8mJysbycPSsevJKJo+V3SYyIJS48GoCaGujuhs/p9hD5nqwXNR2k8hLgWP5qxZRSx4dLq1hZXypvlhlCm2mFptRx5owc+KeKprxkx2dw8SI8yB6STr1G+muveVWuQMZ12Wj+fDAaoadHPHglE0db7sxOEEvIVFdT+3wpqynjU4qsFzVdZKh0IFNZTcPbp8mZ1cQdx8VDgOeeg4ceEpmSkpJg3jzvyhigaJaX0KQG+i2DVFaGkJPjZaH8kIqWOtLae1hwcYAVtjLuV/aBCulvvAHHjgnPXdmP3YprxFHnYAeLFsVy+rRYOsrM9LZ0/oOpdaggIwCZmdwClAGqTdaLmi5SeQlksjOZC+z7GThvCXmzzAizIuOJCYuks7+H0OR6Tp/OlMrLFChvreXFH74AvEABoKpi0A/p6EBZt25oR9mP3Ua4MZQ5sbOo62hyOO0OKS933+1t6fwHU9tVlpe9e7F95rPoZb0otyCXjQKZzXuxKeISO6sBuN4se/d6RaxgQFGUIb+XlDrp9zIFbHY7ptY6tt2/DptOzLO0Qd/Zn2U/9ghaHR6TjDiaMprlJVvL8bJ5M59eIOtFuQupvAQy5s1sv/cLo38mbxaPo/kOhKXWyoF/CtR1XGHAauHl6xfw1XXvjL6T7MceQQv1lxFHU6NroJcr3W0A5Dh+S5sNKivF56oi60VNF/nLBSoWUF9XqZ7VAIBdFgmccWTE0fSocGTWzU5Mp/yS6LfaoK8qsrKoJxkt18u5c9LxfKKYHM660fpwokMjAaiogDpLMo2kiqX7x2W9qOkgn2SBSim0KGaqZllpiQqjeV6uvFlmGFfl5fx5sFi8LJCfoUUaZURlcKZFFAm0r87D9stfYs7JQU1Jkf3YQzgLNLbWkZ2tEhICvb1QVeVdufyFS80iNCs5NNa57fRpuEw6H8+tQnnvMBQWCsthVRWky7Itk0U67AYqb8HF2dU0xUVw28OP8MSmn5FykwIFsrjaTKHNXkMSmrEp/Vy6FMbSpV4Wyo+41CIeAGEDGVwmnQ9kVnH+aAgWq5VDs2dz1/r1GKOivCxlYJKZMBsFBXNfF52DnSxeHMvJk2LpKDvb29L5PpccindKSJxzm7bstnB56JDTlqwXNWWk5SVQeQcupIm8AV3NOSxf4bhb5M0yY8RHxJAYIWZeodJpd9Jos9fBK3MBWLQiVPRfkP3Yw2gRRyDLBEyFckffTbnK8gKyLIC7kMpLIKIC7wjLC0CUZR5yguodpN/L1Oi3DFLdLvy1rpwXysvy5d6UKPjQIo5kgcbJoaqq0/KS6mJ5kcqLe5HKSyBSC9TDhdlC+8+Kkwm8vIWr8tL3xlG4XdYzmQim1jrsqkp8eDTnjsUBUnmZaWSBxqnR1N1OR383ekVHUoiwvAwOwsWL4nPtt5RMD6m8BCJvQ79hkKpZIs9AbpZUXryFll0zLKWO5cf2wKuynslEuOgwuy9ImsuZ02KpaMUKb0oUfGh919R6eVjEke2wVMLHo9zhq5URl4pRpwfgwgWwWiE2VvrmugupvAQi70BFai12nYq1O5qbVsV7W6KgZUm/jiV1baxWy7ijQ9YzmSiXWsTvkhI6l54e4d6yYIGXhQoyXC0vWVkqYWHQ3w9dv5RK+HhcahZLRgscVleAkyfF+4oVQ25bkukho40CERdn3b6GeaxeLe8Wb7F03e085/jbmWtHlmi4JprlRd8prIZLlohkupKZwzXiqOvSKT4218L5iwrhf3FRwmWdtBFcdCjeOUnp0CK2uSovEvcgh4NAow84Bhfvdgz+HfNISfGuSEHN3r1YH/wMBruKTtYzmRCqqjqVl64a6azrLcKNoaTHJVNrvkL80pU849iudkglfDzKHZaX+YkZ9LZcAaTy4gnkslGgcRSwwrkMof3PiZCzIa+yeTM/+dGXRv9MprYflZYeM+a+LnSKQs0psXQhlRfvoC0dvf29b42oLyXrpI3EYrNiahO+hmMtG0ncg1ReAo13QEXlQqpQXpbNkcqLt8mIE6YvmyzRMCE0q8u8+DTOnAwB5KDvLbSigq+sW8gbP5ZFBa9FdXsDFpuVCGMYaTFJALS0QH29+FyGSbsPOYoGGu9AQ1wLfaG92K16bl4+x
9sSBT3J2YtpiQ7j9Kw0tsXLEg3XQktOl5Mwl0uXxDZpefEOWsRRRWsdOTlim017bEglfARafpf5SRnoHHW4Tjui5XJykPm23IjsfYGECrwNF+YIq8tA0xzyVku3Jm8z57pc7vzOPTzwzx/iR+0FdBbLeibjoTk8xtjnYbNBQgKkpXlZqCDFGXHUUkfaylk0kkopeVz5L6mEj4aWWXfBrKElo1OnZKi/J5DKSyBRDTTBeYfyYm2e55wtSbxHVuIcrAYDhqhuDFEdnDkrU9uPh2Z5sbUMOevK8FLvkJUwBwWFjv5uOmZF89GVVazjMO8sl0UFR8M1P5GGVF48g8en5Tt37iQuLg4As9nMli1bxt2/pKSEXbt2kZ+fT3Z2NsXFxaxdu5aNGzd6WlT/x5Ez6ky2UF6S9POkZdcHCDeGkhGXQo250VEmII4bb/S2VL6JxWbF1CocHlvLZaSRtwkzhjgjjspb61i4fBnvnRCZdu+9VyrhV6MlqFswy1V5Ee9SeXEvHn207dy5E4CCggIKCgrIzc2lsLBw3GPMZjMlJSUUFhZSWFhITk6OVFwmikN5sdre5YnHX+UOa4935ZE40erEhMoaR+NS1VaP1W4jKiScC8eEw+PKlV4WKshxXTqSZQLGpnugl/pOkdhFs7zYbHDmjLS8eAKPKi/bt2+noKDA+f/69evZvXv3NY+rrKxEVVUqKiqGHS+5BkehI7ybW88e5/qKJjY1vO5tiSQO5ruUCZDKy9g4ze6z5sKRUl7mdm4KkWnovclQmQCpvIxHucNZNzkqgdhw4Znb0BBFf7/C+0KPklMgSyq4E48pLyaTCbPZ7FwycqWkpMRTXxu8VFbD4VJqEg9yxwnxAFhy7A8yDb2PkCOrS0+Ii82in86OmMdHOvZwO6+y8LBMQ+9Nskcp0Hj+vKjVIxliyN9lyFm3qioGgK/G7UGRJRXcisd8Xkwm06jb4+LiMJvN4x67f/9+EhISaGtro6Kigh07doy578DAAAMDA87/Ozs7AbBYLFgslskLPg7a+dx9XndgzM4EYHk3WgopDObhGTAtg4PjnsOX2+cuvNXGzNhUAMJS66hoUqmvtzJrlme+y5+vY8uZ4yypa2NOby8bEWno9QeexfLQZpEULTERy+zZgH+2b6L40jWc5+i7FS11zJ49SESEkd5ehfPnLSxaNLVz+lL73MXFpioAshPnYLFYsFZUoDtew2r03Omoa6Y++yzWzUN92Z9LKnjiGk7mXDMeR6spJWORm5sLQHZ2NgC7d+9m06ZNHDhwYNT9t2/fzmOPPTZi+0svvURERIQbJB5JcXGxR847HZZ/6L/I/vt/AVYtFRqKIwOmXa/n2Fe/St3BgxM6ly+2z93MdButqg0dCoT1YYxr5Te/Oc/y5a0e/U5/vI4/+NJPHH8VD6sFZVy3zrnPweefF3v4Yfsmiy+00WK3OiOOiv72R9LS7qSiIo69e49x440N0zq3L7TPXbxXK9Lo9ta1cfDgQT56773sAHYAar9jp6v68p8dfdmfcec17O3tnfC+E1ZeioqK2Ldv3zX327Ztm1MBGY3xFBcYUlo0PvnJT1JYWDjmEtS2bdt49NFHnf93dnaSkZHBhg0biImJuaa8k8FisVBcXEx+fj5Go9Gt554uutfugb/fDeSN+Mz29tusWL2aa/mL+XL73IU327j36bcwtV0mLKWW6Ogbuesuu0e+x1+vo7mvi233r+N7+94bVgtKU8ZVgwHbk0+Sn5/vl+2bDL52DZ966hC1HVfIXr2YG2+MoaICwsLyptyHfa1900VVVXY+/mcAPvHBe1g4ax4Dv/41us8XYnSdUGr7O/ryXXfd5RV53YEnrqG2cjIRJqy8bNy4cVJRP1crIRpms3nMz0AoSa7foyksJpNpVKUoNDSU0FHC9YxGo8duCk+ee8qUQZ+xn3AL2BXQqYgMmHY7RoMBJiGvT7bPzXijjfNnZQjlJbWOc+dWYzTqPfp9/nYdKxsaOJibSU92Jj//wciJknL4MIbcXFSHadnf2jcVfKWNOUnp1HZcocrcyPLlqwA4f14/7T7sK+2bLo2dLXQP9qJXdCxInofRYKTlI59hA6soG2VCqfXlQMCd13Ay5/GYw252djZxcXGj+r6sX79+1GPMZjObNm0adozmHzOewhP02IAyKE8ZpCU6jDOps7jwdZkB09eYL8Olx+WS5qwbI9LpyjT0voNrmQAZcTQSrSxAZsJsQgziAayVBQCG+rDsy27Do7/ktm3bhkUWFRUVDQt9NplMzlwwIKwsW7ZsGaao7N69m40bN466ZCRxcBHohpML+rjzO/fwkQ3fJPnfZAZMXyMn0RFx5AiX1orySgRatIYauYgGUjllzIPHpRLuC2h9t6Kl1qm8XLgAAeRvOy2Ghfg7OHVKoYlk2kJTRB+WfdmteNRhd8uWLezcuZOioiIAjhw5wq5du5yfa9l0XbPubtu2bZhC09raOqazrsSBI3XAyYWVWAx6QvuyiI8HkBkwfQnXXC8dnXYuX9ZJvdIFrSxAi20VmVSRnx/CC4UKFBTA4KDsy17E1fKSkaESGanQ0wPl5bBkiZeF8wE0q6FrmPSpU3CZdH7yJRM//Em4qHEh+7Lb8Hi0katicrXPjJZ51xXN+iKZBA7l5dSsKgCyY7O8J4tkTDLiUgnRGxkMGSQkoYnTp1Ol8uLAYrM6Te9N5zMZJJRVqx0fKlIJ9zaZ8bPRKQqd/T2093ewdGkcR46IpSOpvMB5R5j04uRM5zatptHS1SFDxblkX3YbcgEuEDgK/YZB6qPrALh+vlRefBG9Tkd24hxAJqu7mqq2egZtFqJCwjl3RJjUV63yrkySIcKMIaTHpgDDl46k3wv0DvZT1SZCxheniLHXbh/yeVm+XK4PewKpvPg7VuAYXEqrQdXZsXbHcEtegrelkoxBjiwTMCrazHXhrExOnxLDklRefAut75ZLp91hlLfWoqKSGBFLUmQcIFwNu7sVDAYbCxd6VbyARSov/s5ZoA/O5lQC0Hc5i7w8ZfxjJF5jfuJQmQA58A9xvkn032RDJgMDEBUFMsDQt9AKNJpaL0vlxYULTcLfZZHLktGJE+I9I6MLw4yngg0OpPLi7zj8XU4sFIN/WG8mMjDLd3ENlw49dRT1dlmsDeD8lSoAdB2ZgKgkLaNKfQtnjSOXZaOLF4X/aTBzwWE1XJQ8lOr/2DHxnp3d4QWJggM5PPg7VznrZsVIfxdfxrlsNKueTw48JYu1IbKTXnBEa5grMgG5ZOSLuEYcpaerREeL4oy1fzoKQayEn3cqL5nObZrykpUllRdPIZUXf+coWPRW6qJFmOnaHKm8+DJp7T2sbuxhSWMzn1KeExufey6oq39f7miia6CXEL2R8jLh0LxypZeFkowgK2Eo4qi118zSpWK7/ak9EKRKuM1ud4b4LxlFecnJkcqLp5Crcf7MIHACKlLqsOut2PoiuO0WmfzIl9FlZfOU429nVZjm4dW/gy17nTZznZ+UwcvHxJAkLS++R6ghhPS4FGraG6k78R53pSQxiELaIUcp
h+eeg4ceEv03KcmvKyZPlJr2Bvqtg4QZQsmIE9W3m5rg8mVQFJXMTKm8eAqpvPgzp4FBODe/ChDOurm50lnXp9m7F9tDD6K32YfMnpqyYjDAU095STDvoSkvc6MyaW4Wvi7LlnlXJsno5CSmU9PeyOqb72Q18O+A2jtU/TvYlPDzjuXOhbPmonc4aWlWl/nzITzc5i3RAh65bOTPHBFvxx3OuuF9mbi5kLbE3WzezItP/ffonx0+DJs3z6w8PoDmrBvSkwnA4sUQHu49eSRjo0Uc/XHr57HrxdxXcVT/HqaE793rDfFmnPGcdVetCnzlzZtI5cWfcSgvJ2cJ5SUzWvq7+AOaeVlbNlKDPKxGs7z01GYCcsnIl9Gcdv+8cg7Nfz08+k5BpISP56y7erVUXjxJcI+a/s4RsOpsRHSW8sTjr/KRkE5vSySZAHMWXEdLdBhn0xP4gu4X9C0J3mJtrT1mmnvaUVCoOSGK2knlxXfRCjSaWuuYNUs8nIO1+reqqk6roWtZgLIy8S4tL54luHpbINEDnAFTSh13H7/E9RVN3F3zorelkkyAhIXXcd9jn2LzV9fzu9S7eOHfgrf6tzZzzUxIw/bWaV7mdm4JD86QW38gMyFtqMZRdCitxlRKyePI54OvYnJLj5n2vk50iuLM39TZKYpVglRePI1UXvyVv1eDrZTajL9zxwkRqpd2aF9Qh9z6C4qiMDdtHigKYal1nD4TvMXazl0RS5458Zl8oG4Pt/MqK08FX8itv6BFHAFcClP57uYq1nGYv6QViuWiIFLCNX+XzPjZhBvF/XviBORxlDdDb2dWdakXpQt8pPLir2zKBNbwwfe+SEL3AAC6Foe3/5o1kJnpTekk18BZJiAluAs0XjldxpK6NnLO2bkPEXIb/nxw573xdbSlo4qWWhatCAUUUSYgyComa4kVXZ11y8rgQfbwvoFXUX7/e2+JFhTIUGl/5ca98M5nASvO4OggD7n1JzQzc1hqLaff9LIwXuS7j3zf8VcxdoI35NafmJ+UzqvlR6hovcwNQVzjSPN3WZScKZTslhbaShT+2aGE6/btIzYrS2g0qalBkfdmJpHKi7/SvJlB/XxCbDeM/OzwYcjNnXmZJBPGWSYgtZYL5dDXF3zhwV0Dvfzw/nV8b997GOwqutFCbqUS7nNkJ4osyBWtdXz+VrGtvBz6+yEszIuCzTDnm6sAh+XFYel+DIaU8JYWbvvGN4YOkEq4W5HLRv5IO1AO1UkNgEum1iDz9vdntHwZIQktKCF9nDvnZYG8wMWmag7mZvK1b28cfYcgCrn1J4aWjepIS1OJjQW7HS5c8LJgM0jPYB+17VcAWDQrE/buRXWUj9aUcMWhrKhBlPdmJpFPO39EK8a4rIuW6DBOJaUz+PPg8/b3Z+IjYkiKjAMgNLkuKP1ezjcJZ930mDQgeENu/Q0t4qhroIeW3nZnhemzZ70r10xyqbkGFZVZUfEkRsbC5s2c++3oeW+sb70llXAPIEcJf+Q98Xbouk7u/M49fOaOnxLyleDz9vd3hvm9BKXyUgWAGr6EBlI5HZIHj0sl3NdxjTgytV52Ki/B5PdyoUk46y6elencdv68eNcKfwR78klPI39df8SRWfd0fAUWg55FSfPFhiDz9vd3tKWjsNRgtbxUAXBlcA2ZVPHYXYehUCrh/oBrxFEwKi/nRykLcLQmmQZSuZwqlHB19Wr64+Jg1izvCBngSOXFHzkC3aG9NEcKn5dbluZ4WSDJVHC1vJw65WVhZphBqwVT62UALp/KZJBQ8tY4HB2lEu7zzHc4nFe01gW58pLp3PbKxXQyqeKt/xZKuO3ttyl+4gmphHsIqbz4G/XidTajEhSVwbYkbr8p1ttSSaaAU3lJqaOuDtravCzQDHKxuQar3UZceDSn3k0EhkdHS3wbzWpY0TKkvFRUiIijQMdis3KpRSQGXZoi6snZbHDyJAwSyurcISXcbjR6S8yARyov/oZWSXpVBQCDDTksWeJFeSRTRgs5Nca2ow/v5sQJLws0g5y9IvrvoqRsLl4Ug71UXvyHbE15ab1MSopKfLyIONL8PgKZ8pZaLDYrMWGRzIkVflkXLoh0B5GRsGCBlwUMEqTy4m84lJfSdDH4J+tz0Ou9KI9kykSFRpAWnQRAaGptUCkvpxtNAMRZswHIyJCuAf7EWBFHwbB0dKZRjL1LU7JRFKF4DxVjlMFyM4X8mf0NR6TRmXhR/WvFbOnv4s+4+r0Ek/Jy1qG8DDaK/iutLv5FqCGEjLhUYPjSUTAoL2cd9biWOJaMAI4dE++rV3tDouBEKi/+hAochSsxbXRFtKLaFfJzpfLizwxl2q3j5EkvCzND9FkGqGitBeDySWF5kcqL/5ETpE67Z68Ixfu6lGzntiMOi/iaNd6QKDiRyos/UQG0w8mcSwD0N8zjlhuDKB93ADLktFvLmTNgffco3H47HD3qZck8x/mmKuyqyqzIeI6/nQDA9dd7WSjJpNGcdk1BpLwMWi1cahbOukscyovNNrRsJJWXmUMqL/6EQ7s/ukwoL/r2+aSkeFEeybTRlJfw2TUMDKh0/N8eePVVePppL0vmOc46fAYWJGRTLlY/5aDvh7hGHC1bJraZTNDd7UWhPEx5Sy1Wu43YsCjmxAonrXPnoKdHOOsuXuxlAYMIqbz4E5ryMlsoL5mRC70ojMQdZCfOIb29j2WttayNeoXIF0RFWp57TkznSktFxdoAQnPWjRoQM9cFCyAhwZsSSaaC67LRrFkqqami9uDp0wjLYQBaEM84loyWpGQ5nXW1JublIYMnZhCPV5U2m83s37+fAwcOUFxcPKFjdu7cSVxcnPP4LVu2eEw+m82GxWKZ0L4WiwWDwUB/fz82m81jMo2JCSzZVvpmt5NqSOCDyzPpd2NiBa+3bwbwxTb+4fcOT93Ez6MSQX/8PJGo7eMfH9ppEjGovthGV66YW0iNSMDQlsW8ef3cccfk8oP4evvcgT+0MTU8gTmRSdhUO5fbrvDBD8bx5psibHjVpeeFGebPf8ZplnHhWu0zGAzo9XqnguArnNP8XVJH+rusXesNiYIXjyovZWVlHD16FLPZTNsEM3Dt3LkTgIKCAgBKSkooLCxk165dbpVNVVUaGxsxm82TOiY1NZXa2tqZv6lU4Atg0Vv4dsTHiemzoMS2UVnZ476v8Gb7ZghfbGPPM08T0dnDmNIkJUFl5YTP54tt1LCrdjbPvx0AQ38o73+8kvj4STXPp9vnLvyljVvXPIDVbqWl/gpf/kIz//RpOxERUJl2K9x8s4gb1spN63TgqLw8kfbp9XqSk5OJjY31md/gjMNquFQ663odjyovubm55ObmUlRUNOFjtm/fTqXLSLZ+/Xry8/PdrrxoiktycjIRERETujnsdjvd3d1ERUWhm+lg/h6gF9oju5ilQnzoIGp8DMqc2W77Cq+2b4bwxTa2JyfS1dHK3LZRFNHsbIiImNT5fLGNGr2D/dg7jBj0BvqvzCYyErKyhL/ARPHl9rkLf2ljWGc03QO9JEXGET9QA1r8QIT
LOqCrZduRwW289qmqitVqpbOzk4aGBvr6+khLS/NwS67NgHWQ8hYRJacpL4ODOFMcSMvLzOLxZaPJYDKZMJvNziUjV0pKSli/fr1bvsdmszkVl8TExAkfZ7fbGRwcJCwsbOYHlNYBwIoSMkBSh0VcuK5OsDk8dg2GadeD8Wr7ZghfbGO0Yqe3x0wYwsA2TI0ODYWwyUWU+WIbNbpt/eiMeiKMEag2G+nUERWaji5s4tqLL7fPXfhLG6OskfTaB7DpQZmThfFyFTrUkTsqCmRmOvvyRNoXHR1NaGgoLS0tJCcno/eyQ8ml5lpnSYu0GJFc8uRJWD54lP82bCG7bSfkSPPLTOFTd4XJZBp1e1xc3KSWd66F5uMSMckZrVdpOAWcI7W9Eb3dMThYrcLV/dw5gq6yXwARagjBplew6BR6iaAveZ4wRRiN4hVA9FkGAFDsYSTSSgxd6NpavSyVZKqEGUIAGLAMYkxJ5IIyRrjN4sUwiYmiRmRkJKqqTtgv0ZOcHcNZ90H2cKv1VZS9gRsh6Iv4lOVlLBISEsb0mRkYGGBgYMD5f2dnJyAUlLE6vMViQVVVVFXFbrdPWA5VVZ3vkznOHSj6LLBVoaCO8I1QFQXmzUOdpkzebN9M4Ytt1Cs67EYjl9Li6G+aw+yQUELTE0XohqKIojGTwBfbqGHt6yXMYkNvs5OAuKfVtjbUREd7DQYICRn3HL7cPnfhL20MMQjlesA6CNjFpRsYsiBq73YY1o8n2j5tnLZYLF63vJxuEHH9i5PmYSkvh9ZWGg/q+SIiQlB99lmsmzeLfpyYiGW2WNL3BcXLE2jtcmf7JnOuCSsvRUVF7Nu375r7bdu2jdzc3AkLMBHGc/bdvn07jz322IjtL7300piWFYPBQGpqKt3d3QwODk5anq6urkkfMx0Ui0KsLZGeUIgcGOnZ2D1nDjaDARyK23SZ6fZ5A19rowEdA4oNJWSQjg4dYWG90z6nr7XRptrJuqxZWTqHFhesVnTnzjn3M8+fP6Hz+Vr7PIGvt1FVVXQo2FFpNbeBIZzBASN2gwF7fBQhnZ3orFa6entRR3kwXat9g4OD9PX1cejQIaxWq6eaMSHerT4OQP9lM8YPCN+d/wTs2nSyuRnjunXO/Q8+/zzAhKNs/RV3tq+3d+Lj3oSVl40bN7Jx48YpCTRRsrOzR91uNpvH/Gzbtm08+uijzv87OzvJyMhgw4YNxMTEjHpMf38/tbW1REVFETYJfwJVVenq6iI6OnpGvd+VdvFdfaGDRI4yq4mMjobw8BHHffvb3+bHP/4x//jHPybkL+St9k2HtWvXkpeXx+OPPz6h/d3Vxg0bNpCbm8uPfvSjKZ8DhuR/7Cc/ZKC3A51xEOtA9Jh9dyJMtY0lJSV88YtfxGQy8a1vfWtSbduwYQP5+fl861vfGnOf7sFe6hIimdMmIqs0ybR3zYJ4rbZ7s59Otr9NlYm0saioiJKSEuLi4py+e9/61rcoKSkBcJuP4LUwt/fSa+nHGBZKRFwEp3qWExulkJ2hao0h+qo2TPQa9vf3Ex4ezq233jqpsdrd9A7285+PPwfAQ3d/EutTUegffhjFanX6+Dj7scGA7cknyc/Pp7i4mPz8fIwBtvwLwkri7vZ1TmIC7lPLRtnZ2cTFxWEymUYoK2PdiKGhoYSO4qhqNBrH/EFtNhuKoqDT6SblDKeZN7VjZwyHMtodaiVWp2DThxI2OwWlpQUGB9EZjaOWMn3iiSfIzs7mD3/4Axs2bLjm13itfdNg27ZtxMXFTVhed7VRURS3/E6a/OFG0Yd1xkH6O8SjfaqnnkobzWYz9913Hy+//DK5ubmYzeZJtW0iv0efZZCOiBD04dGkXW4ceY7FiyEycuyQcQfe7KeT7W9TZbw2ms1mHnnkEdauXcvu3buHfbZ161Z2795NZWWlR2UsKSlh165d5OfnE5OSQHFxMTeuu4FPfvRzqCj094NON/aVnOg11Ol0KIoy7ng+E1xsuIhdVUmNTiQtbhY89BDHrMtZ/fDIolzK4cMYcnOdliZvy+5p3Nm+yZxnRu78sZZ9TCaTM6+LxrZt25wzBxCzCy3nS9DSDXbFTm+IhUtpcZhnL4RZs4QT3PLlo/oIlJWVkZCQwNatW9m/f/+UvrakpIScHN8o/DiWLBs3bpyxGeZ0uJb8oQ7HR51xEFVVJ5W0zV3yJSQkOJd8R4v40/abap/os4hGKTbR1lFiUnwGX+1vZrOZvLw8CgsLR03emZ+fT0JCwpjXz51yaDm4tn39W2TMm8uGe+50GoD7+0XNn0DhlMPfZVnqUJ/Q6jjZtceon0z4AgWP/tqacrJr1y7KysrYunXrsJwvmvbuypYtWzCbzRQVFVFUVMSRI0fcnuPFr7ADvdBvHERVVOx2A7FRDu1UUca8YXbt2sX69etZv369c6CR+C6hBiMKCuhsKHobk1j69QtUVXVGGg1YIhjEiMUYCfMCN7LKE2zatMl5X4/GmjVrZky5qqysRFVVzpw/x6bPfIp+6+CwyzjTCrgnOe2ox7U8bYFz25sXk2kglcY5efD446I+QGoqJCd7S8zgQg0wOjo6VEDt6OgYc5++vj717Nmzal9f36TObbPZ1Pb2dtVms01XzInTparqEVVtutiunm4oV09WNqh2+7UPi4uLU4uLi1VVVdXs7Gx148aNI/ZZv369umXLFuf/R44cUQHVZrOpGzduVBGTY+ervb3due+WLVvU7OxsNS4uTi0oKBh23o0bN6o7duxQCwoK1Li4ODU7O1stLi5Wi4uL1ezsbBUYIc+BAwfU3NxcFVCzs7PVAwcODDvfWLJc3QZVVdUdO3Y4vyc3N9f5O6jqyGu4ZcsWNS4ubtR9x2vj1d979f+lpaWqdntNVP5LzTXq6YZy9aHPfV2dO3dyv60rY/XTsdqzZcuWYbJd/Z2TuQ5jydU3OKBu+vSn1JjYGDU9PVv9zrZfqe3tjo5st6vqGPeU6/l27do1rH2f+MQn1Li4ODU3N1fdtWuX6jqcjXc9VNW9/W0q98K1GO0aXt3G0Whvb1dLS0uvef7pcuDAAedvYrVZ1dMN5erphnLVarOqFy6o6pEjqtrUNPbxEx1LpzpWu5v1v/qiuuLH96lHa846t2VkqGoI/eorL7v04/5+5+eDg4Pq888/rw4ODs60uDOCJ9o3kee3hrRzOVBVURnUl16qCjgqtPaE9gEQooRzLR/FkpISzGazcwa2cePGSWU5Bjhw4AAHDhwgOzvbGa6omaI3bdpEWVkZxcXFVFZW0tbWRn5+vvNYs9nM1q1b2bRpE5WVleTm5rJp0yZ27dpFaWkppaWlFBUVDVuvb2tr44knnkBVVXbt2uX8jmvJcjWFhYXs27ePAwcO0N7ezo4dO8bMEVRSUkJRUZFzBrljxw4SHBUCr9VGd/2WroQZQnj0kS9z/kIZv/nNxH/bwsLCa8owXnt27NgxTL6xLJ3XasfOnTvHlOu++z5JbU0th8re5f/+r5j/+8U2Ll48Jj4cw4K4adMmTCYTlZWVFBcXs3XrVmef+NjHPsaxY8
d4+eWXefnllydtnXVXf5vKvTCR6zUau3btumbQRFxcnNujPcdi//79FBUV8esnf83//uAnAPRbB51LR319MyKGx7nS1UZTdxs6RWFJShYAly9DbS1YdaGsvd4xICvKtBOFSiaOTznsepPeXoiKutZeOiDO88I46O6GyB5QUekzCpN75ARuDm3JSOO+++5j586dFBUVTTtirKysjKKiItrb250D+oEDB4iPjx+WBTk3N9f5d2FhIUVFRRQWFjoH19zcXCoqKpzndfVrWr9+PdnZ2ZSUlExqIDabzezevZuKigqnw/d4JnSt5pbWDm3fibbR3Vw4fZ6XXniRN45dIIpsYmMn9tteS6maqfaMJZfJZOIvf/ozb58vIz4mGUt6Il//+g7+8Id9XH/96NfXZDINkzkuLo4dO3awb98+YmJieO211zhy5Iizf2zbto1NmzZNWFZ39Lep3gtTVYLLysqmrPi4MtFz5OXljelvqP1O2n3W9j8/5tFHvsyz+54jIkJoL4Gy9Hm6Ufi7zE/KICJERDy9+674bPnyiTw3JJ5AKi++TjcMGAexKzZQdcRGX1t5KSoqGjYTzc3NJS4ubkIzt2tx9OhRZ1SYK2vWrKG4uNg5SK9xqVKmWTNct2VnZ4+wiOzevZvi4mJMJtOY2ZbHQwsbHSus/mrWr19PQkICiqKwfv16CgsL2bhx44Tb6G7OnDhF+rwM4pLC6G8UJWFCQib2247HTLVnLLk0i8ad624DdKh2MUm9/vqxU6lrx2RlZY34jrKyMmJjY4cpGhO95q5Mt79N516YLJp8a65R/W+0SM2rcYcP4dXf8fGNG9n26BauNDczb1YsICwvWp5Ff0Zz1l2eNpR/SFNebrjBGxJJQCovTiIihKVjPOx2O52dncTExMxIiGaEAbBAb5TwfLMPhhEZMf5IoC0Pbd26la1btzq3a067Y9WOmigTLdMw2neM9715eXnO6Kj169eTlzcyBNHdxMXFUVFR4XyIbdq0iR07dnj8e8eit0t0QMVgAcVGb69+1GSzk71+7iytMR5jyWW321m6/Dr2v/RnDJ3z6Oo0MG+eCJgbj9zcXEpLS0dsn2r0nCvu6G/TuRcmi9PCMU7CTk3hmwmutuKmJImLeaminEUZ81EUEW00OOj/KynOSCOpvPgUUnlxoCjXrmxrt4sbMjJyhqLiHONUd5hQXgyEXfN7NevKgQMHhm0vKysjLy+P/fv3j2kKHm9g1Fi/fj1bt24doQQdPXp0yiZtk8lEWVmZM2X4VNHyk0xk9ulKQUEBBQUF7N69m127dvHEE09Mu40T+S2vZsOGDWzbto3Ojk5CQwbp7Q0nLm56vy145ppNhiXLr+PsqTP0dfei6xZDzrVM7bm5uZSVlY2qbGdnZ9PR0YHJZGK+IxvvtX5v18/d1d9m+nddv349ZWVlY1rKjh49OqG0EtNdNjKbzWzatGnY8mx/j1gjSklPQ1FUwsIU+vqE9cWflReb3c7ZRmH1Wp4q+prFImoaAdx4o7ckk0iHXV+mR7z1GoXyEhkyfoZJ19wLV6P5mbiajLOzs52zNZPJxLZt24Ydk52d7az0XVJSgslkcq7ff/CDH3R+tmnTJrKzs6e8JKWZ0jUH3qKiohGzyNFkuZrs7GwKCgqcjp5ayL2rBcqVoqIidu7cidlsxmw2U1xcTHZ29pTaePVvefV3TkT+3Nxcbn7/rTy86TPUN16koWH6v612Xndds4m042pSM2az6dOf4l8e/jI1NSb0enjhhaIROZ6u/h7XawlD1ys3N5eVK1dy3333OWUZ7fce63q4q7+543fVfHsmwq5du9i+ffsIWTRfr4nmw9q1a9eEXmOdLy4uji1btgybIDz169+y4Z4PERUTxaDNEjBOu6bWy/Ra+okwhpGdmA6IStL9/RAfDwsWXOMEEo8hlRdfphsG9RZCbAPMa+4iwTB+1qf9+/eTnZ095syssLDQOZvV/j969Cjx8fEUFhbyyCOPkJmZ6dxfU3iysrKGLado6/l5eXlkZWWRkJAwqnl/omiDYWFhIfHx8c7zu85mx5LlajRn5fz8fOLj49m1axf33XffqPtmZ2dTXFxMVlYW8fHxmM1mnnjiiSm18erfsrCwcNjgPlH5//DCn7jh1vfxwMb384EPTP+31XDXNZtoO1zps/TzHz/+PsuWrebBB/O47bZ4du/edU1fm127dpGbm0teXp7zWmrH/PnPfyY+Pp68vDw2bdo04hqPdz3c2d+m+7uWlJTwyCOPTGjf7OxsKisr2bFjB1u3bmXnzp3s3r17whYXd7Jt2zZ27tzpfLW1tfGrp8S9028ZRCsr5+9Ou6caLwGwNDUbvcPs/c474rN162ReOm+iqNO1nfoYnZ2dxMbG0tHRMW5to8rKSrKysiZVL2NGfV5swHEwh3dh01WR2D2AmpyMMneux75ypn16vIGvt7Grv4cacyN2SygDV9JZtaAHQ2MdpKdfe13TgS+1UVVVLjRXY7PbCO2fg7kljNmzwVFwd0qM1j5tWdTfhjPNWnN1cTtfuoYTpb6zmfbeTpIi4wizJ3LpEoSFwbJlI/edaPumOla7i/948XGeP/0a/3T9R/narfcD8OlPw+9/D489Bv/+72Mfa7FYOHjwIHfddVdAlgfwRPsm8vzW8I+7IhgxD4Daw6Cxk9heUflaaWsbSgIzMOBlASWeIMylxhGKHVtTK3R1QWvrNY70TQasFmx2GzpFR1+XaJsMLR2ipKRkUiHevkyYQVxfV8uLv5cJOF5/EYBVcxY6t0lnXd9AOuz6KpWnAEjucKkBY7XCuXND+1wjbFLifxh0esLsgNWKTt+JocvhaNrWBo7KwRgMfuMF2euoZxRmCKVvoJeF1BGppAMTsyIFOtNNXeBLhDnqc/VbBzEYVAwGBatV+L34o8La3ttJVVs9ACtnC+WluRniKo7yMltYa9wJyDHYW0jLi68SnoXqqK87IjhaUeCq/BeSwEBRFHLq28lp6mSJtRyd3So+0BTXc+fg1CnvCjkJNOVFbw8jkVZi6ELf7p9WJMn4hBlCUFCw2q1Y7bbhfi89PXDhgnj3E040CH+XrITZxIVHA/D22/Age7idV4l+/mlvihf0SOXFF7EDA4l0hY3h37J48dAsXBJwdKTNclrb/F1xtfT2EDZoQ98NCbhYkdy8/Jmbm+t3/i6Bhk6nI8QgfB/6LQNOF63eXsSyp58tfx6/fAGAVXMWQXU1lJZS+Ycy7mOf2OG556CsDEpLxeeSGUUuG/kifYAd+kIHiOkXy0Z+nqRSMhkSEjHZBshp6hz52eLFE3bc9TYWm5Wsy9rDqlMufwYBYYZQBqyD9FsHiTIaiMCKvRuwXrX8qaooFotXZb0WTuVl9iJwRGH+C2DXRuPmZlFJWkMqzzOKtLz4Il2inlF3mAWLTmHQGA7z5omHlmvNeUlAEmYcSqvrz8Nh72A/dQmRAWNFklwbre/2WweIrTnFUs6R3X8O1Tp8+VN3/jyxPmytGLRaOONITrdqzkLYuxfVIOb6Oq1Ha8qKwQB793pDzKBGWl58k
W4YNFjoN9q5lBpPenQmodE6SEoSN4yfhE5KpkaI3ohq0AvFVQ3DGptMrKVF5Fr3I8W119JPR0QI+tBY0hrqR+7gR1YkycRwOu1aBlGzslArq9A5vfeGUBWF3uRkwmdexAlxvqmKQZuF+PBo5sWnwebN/7+9d49u6zoPPX8Hb5AiCUBvWbJF0HFkO6p9ATmZvDy2BfqmTptbJ6SUW/fWbhoJyUzb3OSuCGE6uRm3kyhkpu2aJJ1lQBNfXyeTZYmYO3Wb2KkJxY7Taa8jEm6iOHZiE7QtWbEeBEGQIPEgcOaPg3MEkAAIkABJQPu3FpZE4Dz2d84++3z729+D0dmbueNTJcpIvPACrFElb8FVhPKy0ZCBWUiYFUfH7IKFTe15ZUWSmr/KmWBZJEnCYLHy6k5ITW2jU+qga1/zKa5z6XxNrozyQhPLn62PGuqfzmbIbbZx/tI+bki8vGQ7ed8+MpnMhlVeXswvGd226yak/Jj7r/8KdwA5dOjIKc9iLrd+jbzGaZ6R8FohCSxAwqLk1TZWUc9I0HpYDCZkCXSmlBKgIUlNpbhkc1lSC0p+orn5NtIYWTC3i+XPFseg02PU5512F9KoeeWabfnzXy8UOOvmee6X2/gNO7h8vRseeUTxd9mxA7ZtW69mXtMIy8tGY1bxd1EtLx3mjTo3ETQSq9ECTKMzpUjFlGJwzfSun8+kkJEx6o3E59o4y37edZMEZkksf7Y4FoOJTDZDMpPG3G4lPWkkqzNh3bMFruSXPw0GpVNvQGRZ5mdvqcnpFOVlYQH+PrybIK9z5u9NbL9NgqNHW6NsdpMiRo+NxgykjBmyugWQdTg6V/5g+Hw+JEkiFArVsYEbC7fbvSaVkRfT29tbtuBjLZRrv1XLtJsCcmuSHiMUCtHT04MkSTXL1tvbW1RoUV0yMmFBlsFo0mE25xeNmsyKVMh69bdSBINBvF6vVudIvf6hUGhdn/lCp11Lp4mz7OeX8j5ym7cqfk77929oTfz89EUm56Yx6g3csl1xKP/5z2F2FqxdZm59V0E/ForLuiEsLxsJGZiBWbNSzUzOWLBYVj7IBwIBnE4nw8PDyxbBa1YGBgaKCuo1G+Xab9Qb0Ov0Smp9U5pEwkIjxVRr7Jw+fRqXy6UV71wpibSy7ClnlHWDZsywWoqN0N9isRhHjhzhjjvuKKoSD8qEJRAIMDEx0fA2nDp1CoDx8XEikQgnTpzAZrNhzZcJmM+kMHeCTq8jm4WnngrxX/+rn97eXvbu3csPfvAD3v/+93Po0KGGtrVWwudfAeDW7U7MeQfkf/on5bf3vx/0+vVqmaCQ5pz+tCpJIAOzeX8Xs65txf654XAYh8OBz+fTBplaUWfiG4Fybenr62sKxazW9kuSdNX6YkoxO9v49jkcDlz5qIlyL+hq+kQ2l2M+oySfS80qy54dHfVr61qwUftbLBbTrD/Hjh1b8ntvby8Oh6PhCpbP58Pj8XD06FEGBwdxOBxajSbNaXchQ07OaZl2L12KEQqF8Hq9fPrTn6a7u3tDlkcYPfdLAA7suUX77ic/Uf79wAfWo0WCUgjlZSMxAzlm2TpzCWs6S5d15f4ufr8fj8eDx+MhFou19NJRq1KovMzNNU8OrPlMUvN3ScwoywPNprxsVPr7+7XnuhQHDhxYE+UqEokQDAa1v3t6ehgdHQUUq6FRZ0BGJrmQ0pSXVAomJiaQZZlXX32Vhx56qOHtXAmLlRdZvmp5EcrLxkEoLxuJOCzoL9GeytA5m8HRtfJ14VOnTtHf34/T6cTpdC4xL8NSv41wOIzdbgeUQbK3t5dIJIIkSUiSVLSU4PP56OnpwW63L/EB6O/vZ2hoCK/Xi91up6enR1uHV30qFlfSDQaDuN1upbZPT0/RwFipLaV8T4aGhrTzuN3uioqbz+fDbreX3LaSjNVcSzXEstb2q+d9554eHv78/4LOlCKbVSr0lru21VBOHp/PR39/v9a+crIu1ycmJyfxer3s3rGLD/0Pd/PT519Q/F2MimtAYZsDgUBVba60z6FDh7Db7bjdbgKBgHa9ofL9gPr2t5U8CyshEAgQCoVKPsuFrIVPzsjISJHl58yZM0VKk9V0demoUHnZ6Lw1fYkL8SsYdHqtGOOvfw1vv6304TvuWOcGCjSE8qIiA4l1+kRTcCkBFxPoE3GY12GbymBIzq2o/ksoFCIWi2mDSV9fX9HgXA3Dw8MMDw/jdDqRZRlZljVTdH9/P+FwmJGRESYmJohGo/T29mr7xmIx7YU4MTGBy+Wiv78fv9/P2NgYY2NjBIPBopdRNBrlxIkTyLKM3+/XzrFcWxbj9Xo5efIkw8PDTE1NMTg4WNZ/IxQKEQwGtdmgav6uRsZ6XcvFFJ731ddeYzoW48gD/x6kLIlE6WtbzcuqkjyDg4NF7Sv3clxOjqGhIfr7+3lu7P/jlv238uf/SXm5dXTAoUOKcjQxMcHIyAg+n0+7v5XaXG6f+++/nxdffJHTp09z+vTpZV/oi6lXf1vJs7BS5cLv9y+7zGKz2bSlv7UiGAwSi8U4ceKE9p2lwO9FzUOYTsMTT5zSnv0vf/nLa9rOalCtLu/a0UObSfHXevZZ5bf3vQ8t9Fuw/giHXZU5YBmnQh06bNgacHJz/gN6bgfyN+b5MFjzSZBqqP+iLhmpHD58mKGhIYLB4KrXmMPhMMFgkKmpKW1AHx4exm63EwqFtPO6XC7t/16vV4uMUAdXl8vF+Pi4dtyjR49q//d4PDidTkKhUE0DcSwWIxAIMD4+jtPp1I5VaftoNKrJoW5brYz1ptR5v/WonwPv2M+Z8NNs3fI7wNJru5xStVbyuFwu7r7nbn516Q36/8O/58jhBwGIRiNF57fZbAwODnLy5Mmy91ddlii1T2dnJ8899xxnzpzR9h8YGFhizatEPfrbSp+FlSrB4XC4LlaVao/hdruLrtNiVKdd1dm7UMFTlzznMynMXYqT6003uXjHO+DWW53kcjm+8Y1vcOjQoZonVo2klL/Lj36k/Hv33evRIkE5hPKy0ZEkrShYtQSDwaKZqMvlwmazVTVzW47R0VGcTueSmeiBAwcYGRnRBukDBcqWas0o/M7pdC6xiAQCAUZGRohEIkQikZrbFgqFsNlsmuKyHB6PB4fDgSRJeDwevF4vfX19VctYb0qd12o0c8tt+/npT0e4832/o7VDRb22tR5XPU495Tlw4ABzacXfZUu+6vlN/IqnXjoDQPeiOkYHKijkqhWk1D7hcJiurq4iRaPae17Iavvbap6FWlHbV+maqdstdy1qtVKVw2azacpNIBDAbrczMTGhRBzllZdMNkM2l6W9Xc/u3U62br26//33389nP/tZYrHYukdwqSxWXnI5eO455bd77lmnRglKIpaNVNqA2cqfXDxH7HyMXDy37LY1f0YTiqWl8GPJKXkR8i+CalBnMaovh/pRnXZXGwJb7f6lBqNKA5Tb7WZ4eBiv18vY2NiamL5tNhvj4+P4/X5sNpvmn7Daa7RSSp1XSVYH6LLMzyvOg7UO9Gslj81m00KkDSjt7mQG
w9wMLpeLqampos/IyEjF461kn2qpR39bzbNQK6pCEo1Gy24TDofX5F6rS2GF51ocGKDX6bUw4/kFZeno9OlgUb6irq4ugBUpjo2glL/LSy/B9ZdHeVZ3D+/Wja5zCwWFCOVFRQLa1+mjBhVZc8jWnLJUZM2tqBCMal1ZPOiPjY0BVAybrjQwqng8HiKRyJJBcnR0lDtW6M0WiUQ0v4HVWAHU/CS1DoZHjx5leHgYv9/PyZMn6yJjNddyMaXOazWa+eXPzvJb7lsBmVwmC9EotWSta8Q9K0dqLoElnUU/d/U71549yov1rbeq9t9yuVxlX8ZOp5Pp6emi+7zc9S78vV79bS2vq3q+Sn5Co6OjVSlhXq+3qk85p+pIJMLQ0FDRNVWvQemloyQLCzG+8IV+Xnnl6j2bnp4GVmY1awTl/F3+kMe5K/csxie+s57NEyxCKC8bgQTkZAMZnUTSpGdmy3Urqv+iznxKrWmrfiaFJmOn06kNhpFIhIGBgaJ9nE6nNjiHQiEikYi2fn/w4EHtNzWqaaVLUqopXR0sg8HgkkG6VFsW43Q6OXr0qOboGYvFCAaDZbPFBoNBzdISi8UYGRnB6XSuSMbF13LxOatpf6nz/uHv/wG7r9/DvR+5F0m/oKRUT6dhcnKZq1r5uCu9Z5XkyMk5bjh/mZ5LcbZP/+bqPjt2cPT+++n/6EeJPP00cPXaVzpP4b0s3MflcnHbbbdx+PBhrS2lrne5+1Gv/laP67o45LgSfr+f48ePL2mL6utVyT9l8XGq+ZQ7nsvl4tixY0VKh+q/pCqDkUiEwDcfASCZSbFrl40//MNjbN3qZGFB2eexxx7jYx/72MZcMnrjDRgb4/X/FuYwJ5UNnngCwmEYG1N+F6wrQnnZCMQhYcny6k4b45sdtO3ZcTWNtslU9WFOnTqF0+ksO5v0er1Fs1mv18vo6KgW4nnkyBH2FvjXqApPd3c3g4OD2vfqjNXtdtPd3Y3D4dAsOyvBZrNx7NgxLZxUPX7hoFauLYtRnZV7e3ux2+34/X4OHz5cclun08nIyAjd3d3a0poaMVGrjIuvpdfrLRrcq23/4vNu3ryZp575AZZ0lnbDNLpsvh6Man2Zny97rErHXek9qyRHJrvAeUd7ySJ8/i9+Ede+fbgfeki7L8tZPfx+Py6XC7fbvWSfJ598UguT7u/vX3KPK92Peva31V7XUCjEkSNHqtrW6XQyMTHB4OCgVhIgEAgwOjpateJSLwYGBrSSBOoE4PTp09rvoVCI7zz6GKA47RoMMkePDvD440N89atDfP0rX2HmzTcZfuyxNW13JYqUl7174cAB/vrHbrZyWdng8mWlGOOBAzX7IQrqjyTLjU19pXqkDw8PV7VereYx6O3t1V4ud9xxR9UzmXg8TldXF9PT03R2dpbcJplMMjExQXd3N5YaYt9yuRzxeJzOzk509azN8ku4YL7MVHscXbqLm6/fUr9j10DD5NtANKWMo1fX2mXKrCYWOHKul4wXpi8zNR/HtmDlurcvLN3g5pvR4mZXQSn5wuEwbrebBg9ndUe11iweG5uyny4iJ+d45eLryMi8Y8v1nH/TSDQKu3bBzoU3kS5dQt62Den668seY6Vjda2cj13kw//XZzDo9PzkT75N2/D/g/zgQ0jZhaUbGwzw2GPwwAMVj5nJZHjqqae47777MG7gWk4rpRHyVfP+VmnoUxEOh7VQump9AAqXPrxeLz09PRsyhXTdyIA8JzNjUZwEOixt69wgwUZjbvcuzZqxRHGRJFgUkbMeyLLMbFrpw7mU8pJpLjVifQiFQjWFeDcTOkmnFWmcz6TotKRoI8HCdEKxHMJVC+IK8lnVk39+/WcA3LbrJsXf5YEHePx/fqH0xi+8sKziImg8DQ2VVk2vtcbxq+F21wTTMG9KsaBfgJyerfaVlwQQtCbGrduIpGfpuRRf+uO+fXWxZqyW1EKGTHYBnSQxM9tOGiN6iwn99i1w5Yrip9OCs8/V0tITMxSn3flMivlMih0XXmELQKJAsV1YgJdfvrpDDfms6sk/v34WgPft/S3tu9FReBCQJR2SnFMqoedy69I+wVKa0x7ZSkxD3JqPHMm0YTavsBKjoGUx6g0Y9co8Y6NaM1Sri1lvZS5j5iXdfqRb9sHWrSvy3xK0Bmqo/1wmiby3m1zedqiOctpot44WxEx2gZ+++QsA3ptXXlIp+GF4G79hB/O3uOGRRxR/lx07YNu2dWmnoJgNmaTu1KlTOBwOotEo4+PjFR3mUqkUqQJzYzyuzE4zmQyZTKbkPplMBlmWyeVy5GrQpNX1dHXfVSODFJeY2aooL+3Gtvocd6XNqbd8G5BmldFktpLRSWQkE1G2c535CmQyyAbDktngesg4m8rHRmcUy+GmDgmQyeUK1K06taWUfLfffjvZbLap7mklmrWfLsaaLxOQzKTIbd3Bmxf30T3/8pLtcvv2gdW6pI/kcjlkWSaTyaDX6xvSxn9961ck0vN0WTZxo2M3mUyG556TeC25m/fumODXYzoyOgn+6I8UC6LZrET9LYP6/in3Hmp2GiFfLcfacMqLmqdAjQwIBAL09/czPDxccvvjx4/z8MMPL/n+mWeeoa2ttP+IwWBgx44dzM7Okk6na27jzMxMzfuUQj+nxyRl2TUV5e3OdqyWrKZ8rSf1km8j02wy5iQdr+60kU1bSF3ehnmrGaM+q0QblYk4WisZc3KOuXQSgLmY8rIymeaJx2t/tmqh2e7hSmh2GWVZRi/pyMo5rsSiGPTK0qHqeK7+m5iZIVvixZVOp5mfn+f5559nYaGE82wdOD35cwCuNzj44dM/BOCxx24B3kHPLZd4+ocvrur49UqsuFGpp3xzc3PLb5SnauUlGAxy8uTJZbcbGBhYVXbUxQmLDh06hNfrLZtCemBggM997nPa3/F4nD179nDvvfdWjDY6d+4cmzZtqsmDXZZlZmZm6OjoKKpQu1KkGYl53Wu0pxawzWax7+lY9TFXQ73l24g0q4zW7AJTk7PozCmQckhSJ5220otIay3jbHoeOSVj1BmJJ5TnaccOC0ZjY6JDmvUe1kIryTgbTxNPziKZ9GxyWEnPGsnqjMib27EmEsiZDO12e0mfqGQyidVq5c4772xYtNHJJxTH3I++999y3y13AvDnf668Gj/xiV3cd9/OFR03k8kwMjJCb29vy0Yb1Vu+WibvVSsvfX19a+Jctrh4oKqwqAnSFmM2mzGbzUu+NxqNZS9oNptFkiR0Ol1NYYiq+Vbdd8WkUoqj2hSYFpSZlS01j06dQRsMimlyjambfBuYZpXRJBkx6Aws5BbQmVLMzlrZsqX0S22tZVRLAhhpAySsVjCbG3feZr2HtdBKMrabLMSTs8xnUthsDs6+uR85J9G9aRrLnj3oAKmMjDqdDkmSKo7nqyE2P8MvL04A8H7nv8FoNHL+vFIWQKeDD33IsGo/80a1faNQT/lqOc6GWjZScx4UVgVWE6ptlBTSdeHsWe2/6iquLpfdEF73go2JJEm0myxMJ2fRmZPMzGycqDTV32V
hXmnTtRIoKKiOtgKnXaNRxmTWkUpBMpl//ayjcvbCG79ARubGLXvY3qFkXv7Hf1R+e/e7ayorJ1hj1qTXlMvxotbIUFEzXxYqKoFAgL6+vtYKne7uhg3odS/Y2Kj1VvSmJKlUVT6DDSe1kCadzWDNZNlx5TxtJMjX2xMIADAbTOh1enJyjmQmRUd+dXx+fv3nzmp+l/fecDVE+oeK2wsf+tB6tEhQLQ3tPWrdjpMnTxIOh/H5fEXZctVsuseOHdP2UdNOq0xOTpZ11m1aNm9GvmhBmlvqdb9R8nYINh7qDFZnTgIyMzMS+TI968ZM3uqyeS5Lh5xgqzRJu+i/ggIkSaLNaGEmlSCRSdLRYeHKlfVXXmRZ5p9fV5x139etKC8L/32UP/1/jzHBEB/6kLB+b2Qa2nucTifHjh0rUk4KOXr06JKaHKr1paVJQzKTxkqFdO8CwSLUGWw2l0VnSjEzY1lf5SWVIjUdw7KQpT2hRBvZiSLN5W3t6+S7Jdh4tJkU5WUunWT7JuW7VEpPLiev26rRyxcnuDQbxWo04959MwCX/upx7sw+yxHzdzgglu43NM3tCdasxCBuTSlVpA0muOGGFVWRXg6fz4ckSYRCobodc6PhdrtLVtFuNL29vWWrVddCLe1X/V4AdOZ54nGoZymfUChET08PkiRVJ9vZs1x34Qo9l+L89qc/xdDjj6OX8xlTX365yLerVViv/laKYDCI1+vVijSqFutQKLThnnnVajifSWIyyZhMyrRtdnb9pm7PvqbUDPtw2x7MPzsL4TCdTykRtR/nCfQ/ExWkNzLrv+h4DZKbyjHVkWSyw0aXfifXbW2DLVuUN1EdpyGBQACn08nw8PCyFXyblYGBgab2h6q1/e2mNuLJBHrLPKkZO+lYAvOl87B796qWG1Vn+dOnT+NyuTRH+UrMXreD9rfeLrIcFvlutWDl3Y3Q32KxGEeOHOGOO+7A7/cX/ebz+QgEAkxMTDS0DWolb6fTueR6lAqusBjN6CSJhVyWdDZDR4eRyUmYnV0/B+/nxhXl5UtHvwJ8BYD2fA/uTOUrSKs0WcHPawFheVlrUjC7ME9Wl0XOGdi+OR81Ikl1VVzC4TAOhwOfz8epU6dWdAx1Jr4RKNeWvr6+plDM6tX+TSalv+jMSZByZC9OwswMTE6uun0Oh0NLR1DuBV0oxxWzjsi2MpVf9+1r6lCNjdrfYrGYZv0ptbze29uLw+FouIIVDofp7+/H7XbT09OjfcpZ7HSSpJUKSKSTbMovHa1XDr7zsUv8+vKb6CUdiUdPKEucgJQvwKH+i8EA3/3u+jRSUBGhvKw1UZhuU55Yo7wJg74xZlO/34/H48Hj8WiVugXNj8lgpE3WYUkvsMk4hSlRojrvCrJG18pCLqtl1RWsHf39/dpzXYoDBw6siXLl9XqRZbno4/f7KwZXqNFyifQ8HR2KcjA3B9ksSr/91a+Uf9eAH+etLv/munfS/kefVCpFl0JUkN6wCOVljclMxnHMvo01nWVrZ+My6p46dYr+/n6cTidOp3OJeRmW+m2Ew2HsdjugDJK9vb1EIhEkSUKSpKKlBJ/PR09PD3a7fYkPQH9/P0NDQ3i9Xux2Oz09Pdo6vOpT0d/fX7RPMBjE7XYjSRI9PT1FlcgrtaWU78nQ0JB2HrfbXVFx8/l82O32kttWkrGaa6lmRq21/ctd2+Hjf8PQf/wC7/1AN+/8d79D6IUXrlbnfflldL/4RVlZSx3X5/PR39+vta+crIVyGPUGbt3Zw+xckjRGsui5nJPxDg1hv+ceevbvX3LdC/tDIBCoeD2r2efQoUPY7XbcbjeBQKAoE22l+wH17W8reRZWQiAQ0CI0K7EWPjmLE5aGQqFlHVw3mZRyLYn0PCYTGAw5ZFlS9JXJ+lgQq+XZ18YAuOvG4jZn1VdikycGvBYQdyiPLMvMpZMVP/OZZL68e+Xtyn6mk8TT55Dm5jDF5jFbchW3l1e4zhoKhYjFYtoMrK+vr2hwrobh4WGGh4dxOp3azEo1Rff39xMOhxkZGWFiYoJoNEpvb6+2bywW016IExMTuFwu+vv78fv9jI2NMTY2RjAYLHoZRaNRTpw4oc3g1HMs15bFeL1eTp48yfDwMFNTUwwODpb13wiFQgSDQSYmJpBlmcHBQRz58J3lZKzXtVxMVdf2m9+k7+BBJp58Ete+fXiPH796AElCLuFrUum4g4ODRe0r93IslOON6G/4xW9eo6NrN2fZT8rQzv/+t9+i/8gRJl5/Hdcix1ZVOZqYmGBkZASfz6fd33JU2uf+++/nxRdf5PTp05w+fXrZF/pi6tXfVvIsrFS58Pv9y2Y5t9lsqyrPUi2Ffi2RSKRsBvRCFL8XHdlcluRCmg7zHG0kSEYTiuUQii2IBQV368n0/Czh80qairvzysukXqkgPYabK18RFaSbAeGwm2c+k+K933hobU/6TOWf/+XPHtNMrbWgLhmpHD58mKGhoSWlF1ZCOBwmGAwyNTWlDejDw8PY7XZCoZB2XpfLpf3f6/VqkRHq4OpyuRgfH9eOWxgy7/F4cDqdhEKhmgbiWCxGIBAoytBcyYQei8WIRqOaHOq21cpYb2q5tj0f6cV2MY73/vvp/ZM/uXqQffuQrVYoqBHSCHkSaSW/y/x0OzI6jMb8Pc+/uL1er/YSV/M9qee32WwMDg5y8uTJsve30j6dnZ0899xznDlzRtt/YGBgiTWvEvXobyt9FlaqBIfD4bpYVao9htvtXpLKohSDg4NVKY86SaLdZFXyvaTn6Unkn/8rBRupFkSVd72rqrbWwk8mXiQr53jHluvZbdsOwJNju/k0r3Pr7SbCX5Rg4OjVCtKCDYlQXlqQYDBYNJi4XC5sNltVM7flGB0dLRlhcODAAUZGRrRButCErFozCr9zOp1LLCKBQICRkRFtJlcroVAIm81WdSkJj8eDw+FAkiQ8Hg9er5e+vr6qZaw3tVxbi0EZVO1VpLOttzw5OUdOljHpTEzPmpAkxa+x1D0HNItG96LM0ZWWGSrtEw6H6erqKlI0VlI+ZLX9bTXPQq2o7VtuaSYSiSx7LWq1UlViOevZYgqVF/OWHWy6chEdJSzMDYxWe/bVMwDcdePVaKK/+ztIY+b37i84v1BcNjRCecljNZr5lz97rOI2spwjHp+hs7MDSaphxS0azecKKPOQXn8DpbKNWY21Pzzq8pDP5ytam1eddstV566WakJooXS0SqXzut1uLTrK4/HgLgxTbBA2m43x8XHtJdbf38/g4GDDz1uOWq6tta2djO4KSb3yCOes7egW0iXzBFV73GrJyTLWdJad03EyzGHoaEeSKt9fl8vF2NhYTecpt89Ko+cKqUd/W82zUCuqQlKu1ArUrkjUA7/fX1NE4iazFWZgLp2ks8tGJL6PG9MVMo0n6+sUPpOa4/nIiwAcfMe7ASVc+5m8Ffz++8vtKdhoCOUljyRJyy7R5HI5MsY0VqOltkqv23cxH23DOvfa0t9uvrmu5QBU68pir/9wOIzb7ebUqVNlTcGVBkYVj8eDz+dbog
SNjo6u2KQdiUQIh8Mr9vFRUfOTVDP7LETN9BwIBPD7/Zw4cWLVMlZzLRdTy7Vta+/i1Z02LlxWnCAnt+1j6+Z8nqB8ReKVHHc5FrIL5OQcXXMprKkUm5lEZ6/cf10uF+FwuCbFudI+TqeT6elpIpEIN954I7D89S78vV79rRHPwnLnC4fDZS1lo6OjVS3z1HPZKBQK1aT4mfRGTHoj6WyGVG6BTe1mSK9dpvHTv/4p6WwGp+M69m3bCyiFGFMpcDobskolaBDCYXctmIFpoxIe3chUR6p1pdTgpPqZFJqMnU6nNluLRCIMDAwU7eN0OolEItpxVac8j8fDwYMHtd/UqKaVLkmppnTVgTcYDC6ZRZZqy2KcTidHjx7VHD1jsRjBYLBs7olgMMjQ0BCxWIxYLMbIyAhOp3NFMi6+lovPWU37azmv2WDEZDSBpPSoWKx8nqC63bNUih1bHJx/4xzyxSihF14gdv7nOCwJJd41my17bQrvC1y99uWotI/L5eK2227j8OHDmjylrne5+1Gv/laP66r69lSD3+/n+PHjS9qi+npVo7iox6nmU83xap0oSJLEJrOicCdzaaxdBtIYmZfaka9vTKbxQp56+Z8AuO+WDyjRZ6Oj3PTpe3Azyv33K4ZwQXMglJc1YO5iinhbkoxOImuxNqwcwKlTp3A6nWVnZl6vV5vNqn+Pjo5qIZ5Hjhxhb8E6s6rwdHd3Fy2nqOv5breb7u5uHA5HzUsChaj1rNRwUvX4hbPZcm1ZjOqs3Nvbi91ux+/3c/jw4ZLbOp1ORkZG6O7uxm63E4vFOHHixIpkXHwtvV5v0aBebftrOW+n5arFIx5XfB3rcdyynD3LXV0OXPv28Y7f/V0GH38cAwvof/WyEh1y+XLZXf1+Py6XC7fbrd2X5XxtKu3z5JNPamHSarbXQirdj3r2t9Ve11AoxJEjR6ra1ul0MjExweDgoFYSIBAIVG1xaQROp7NmP56OvPKSymVotxn5hbSfX8r7SHZsVZaL9u8Hk6nubb00G+Wnb74EwG/ve7/ShhOPs//ys/wHvsPv/37dTyloIJK8WtvpBiMej9PV1cX09DSdnaWzfyaTSSYmJuju7sZiqT6aJ5fLEY/H6ezsrH7ZaDJB8nyEC3YzGTp5557tinovy3UvB7BaViRfk9FKMiYzKcYnz4MsMX9hL917dWze3DgZU5fexvTm+dLmfdXBcg2y6paST10WbbbhTLXWjIyMFH3fSv10MTk5x68uvUFOztHtuI7zr1uIx+G662DnzuJtVzpWl+Lx0R/wV899h4PGbfz1Bx4ESWL+rt/GOnOJSf02HC88rWTW3bJFmWCukkwmw1NPPcV9992HsUGWpPWkEfJV8/5WET4vDSZz4RKWTIquOZB22q/aJSVJ2CgFq8JsMGn+A3rrHLHYJjZvBmlujva33kIyGNDysNeBSYue+W2d9FyKL/1RdbAU1EQoFKopxLsV0Ek6NpmsxFMJZlIJbDZFeZmeXqq81BN1yeiv/+M3gW8CYMmr4o7sZaQDopZRM9FaKv1GIZWCRAL5SgJdZgqArrkMDsNCQ5MvCa4tJEmiy6IoJ/q2GWKxvNtJNIpxfv5q4q86kM3liM9fTd0uhvb60NfXt25LPutJR37JM55M0NWl9KbZWchkGnO+icm3ePniBAadnsS3RS2jVkBYXhrB2bOA4j2vaof6XK44+dIy+RoEgmrosm7icmIKiyGBUZph5qKOrsJspeoyjsGwqrwV08lZsnIWk9FEGiMZTJiv24IhdkVJ5tWCZnFB49hkakNCIp3NIOvStLWZmZtTrC9bttT/fD/IW13et/c22j/6SbjdVVw1WuWFF2ANMhQLVo9QXhpBdzfyxOtIyJp/gLZA1MDkS4JrD7PBhNVowXn+N0AMLhRYRRZnK12hwizLMlNz8fz/bZzlejo6JG7aKcGOLRvCd8vlcjWdv8u1jE6SMOuMJHNp4qkENpuivExNFSgviQS8+eaqLdWZ7AJ/d/Y5AH731juLfsuiQ0+uZIoBwcZGLBvVE7UyqsHC5c7tpbfZt29NnBoF1w5dlk2cd7RrSktJhXlRptpamMskSS6k0Ek6Zi53IKNj67YC360WcygVrA1WvRJRND0/i92u9N6iqLnJyat1jlbBj8fDXE5M4Wjr0moZvTyp1DIK4ybxV6KWUTMiLC/1JF8ZdT79NjNdWbbF1y75kuDapcu6iYvtZlIGfUOcaScTsXxG3XkmMkl0pnbqkDRWcI1j0RnRSTpl6UifwmKxkEummL20gK2Lqz5biQS8pIQ4ryQSKPhzpYr3/fvvwpjPSP3I93fzCK/z4d8z8d8+J8FnRS2jZkNMmWpAi+KYm7v6Zd45l8TVyqjGTAxDLkdW0iFZG5fXRSAAMOj0muMuXF02qsciSjKTZiY1l8+om2Qzk2zbJgLlBKtHJ+m0nC/TyRkcDvgtzmK78LKy3KmaYHI5+NjHlGXPGpfcJybf4l9e/zkSEh/dfxBGR8n+j/fw80dHSWPmqLfAgigUl6ZCWF5qIR/FIUejSghqIlHkU6BaWfQ5mRuuzCpfzs/D1q3KjGED+AYIWhO7tZPzs9NkdBLpnJVMVyddmbgSvrFShTmVYjp2GUs6S9ecEgbiIIqufTMkWLUTsEDQZdnEdHKW6WSC622biVzoZi+vly7WaDDAY4/VdPzvhp8GlCKMu23b4PH/Df3zz/J7fIfX9x7g3nvrIIRgXRDKy3KkUtoMIHdlEj1cjeJ4++2iTSs654q8LoIGYjWaMVisvLpTR2bGjjHTQee+XUpfXKnCfPYs24HtXLXiGFhA+pWImhPUh3aTFaPeQCa7QFpKkGzbzCtzFm6hRLHGGiOBpubi/MNLP2bnVAKvcS+Ew8gnTyIBH+cJOj78ILoX65eUTrC2COVlOfJhz1CwxrYoiiMrSehLRTqIxF2CNUKSJLa02ziXuYhhU5y539hIJqGtTadYCM+fh927a+qPk9sdOC5GkVhGMRcIVoHd2sml2SjRuTibN3cwObf8PtXwvfAPSS1k+OFXvw98P/+t0oO3cplP/K0b/jb/tYhUazrEGsZydHdrFpPFA7ja3XWi4ws2AB3mdswGE+iyGDbFuXgx31PzjuRMTlZ9rER6nreNMhPby6ToFlFzgjpht3YgITGfSdLWmWRBMpLGSNbaDrt2KXWOtm6tKRIonpzle/klo7Nf/1+XJKXTiaR0TY9QXpZj82ZloC7BktkoKCbIdXLOHRoaQpIk7HY7drsdSZLo6enB5/NpxRirIRQK0dPTgyRJZSsyrwdut7tkxexy9Pb21tz+cDisVJttQlTrC0Bb2yTJqVnSsauO5ESjV53LK+TOkGWZqctvs/fyDJsWlHBWoZ4LGoVBb6Az73AeS03TbjNxlv2c37QPHA6lZsDp08oy/T33wOjossf8v8eeZjY9z41b9nDrf/qSsuRUihdegAceqKc4gjVCLBvVgOqQWzb8+eabFcVlHYsu2mw2pqamtL8jkQg+nw+3283Y2FhR5dxSTE9Pc/jwYU6fPo3L5apJ6Wk0A
wMDy7b/WqfLsonJxDQ9598GpuC1gh+rTFo3OTeNdWaW9tQCyUyONEYwmjDt2gJXREZdQf3Z0t7FdHKGeDLBzi1ppqZMTE4WGPdMJnj8cXj2WfjOdyr6Wl1JxHh89AcAfH7TLegOeuATnwBEUrpWQlheqkG1orS1Mbd1K1Sqbjo6CgcPQji8du2rgNPpZHh4mGg0yqlTp5bd/rnnnsPhcODKO8ath7KgWn4W09fXh8fjWfP2VKJcW9cLSZLY3uEoSlpXEotlafKvVIpUPEY8epmuuTQAnbk450w9GHquh85OxQq5f7/yMhEI6oTFaKbD3A7IzMkxrFZFt4hPphVl+aWX4ORJZeMnnlDG17ExeOONJcf62386xVwmybt29PCeH4fh2WeJ//2PeJsdjOFmwieS0rUCwvJSDSYT7N+PDKTjcSw7diC98ory/ZZFs9EqZwcCQaNoM1q43NFBpFzSOrtdycM+OalYClWH3pkZzICT4uiinvQr8Er+iwMHRNScoCFs3WRjJpVgOjmLY2sXb71pxnH510xfuQKf+hRcvqxsePlycV2iAp/Dl94e578//31uTiT5z+86hHTq08om//AD/h1Pcsd7DHzr01vhuEhK1+wIy0u1FC4BGY3K7HPfPsWRzGpVHoRf/KLq2cFaEYlE6O/vx+FwFFWv9Xq92O12enp6CAQCAHzhC1/goYceIhKJIElSkX9Jqe0B+vv7CQQCBAIBenp6CIVCVe0zNDRU9Lu6X39/P729vVobJEnSlq4W+7AEg0Hcbrfm2xMMBmu+PrFYjN7eXiRJwu12F7V/uXNUams92rYaugxtGHR6oIS/Sjyv0Kg+MG+/rWSGtpobVmJAIFgOq9GST7Yok9RfwWiUOcd1V/uvqqSo/+r1cMstmg9MeiHDf/7hIzz91X/gif9jhHf+9sc0hacjeZkXeC/feuEOJUpOJKVrehpueRkaGgJgfHwcAL/fX9U+6nJFLBbj2LFjDWvfiilUZgoHdXVWWmF20EhisdgSh9Njx45x4sQJ7e/+/n5isRgTExNEo1HcbjcHDhzga1/7Grfeeit/8Rd/od2vSturPjF+v59YLMbg4KC2rLPcPj6fj5GREQYHBzly5Aher5fx8XGGh4cJBoP4fL6iNpQiGo1y4sQJXC4XoVCI3t5exsbGtCWvaujv7ycajTI+Po7D4eDIkSNVn6NSW+vRttWgk3RstW0lcynGgkGHNZ29+mM2//9FPjDGVJoLjk1cF51dekAR9i9YA7Z3bGYmNcdcJoltxwyXz9lJ6HbiLLVxX58yWcxbuYOPDuIbepxvf9jNJ/7xZ0gLC9q4WxRdVGOiO8HGpKHKi8/nY3BwUPvb6/XS29vLyMhI2X1UZUe1EoRCIbxeb1VKz7rx3e/CQw8pL4PFs4M1flgKHXbD4TBut7vI0TUSiRAMBpmamsJms2Gz2RgcHOTkyZPcfvvtS45XaXv1RRyJRJiYmKjqHOo+LpdLU3TUflErhZYkj8eD0+kkFApVrSBEIhFCoRDj4+M4ncrwODAwUGQlWek5Vtu2emBp28TMvps4H79E11ya66KJko7mhZmhSyouAsEaYdQb2LrJzsWZSeaYxGjeTFb1q1WdbCVJGV+feUb5/okneOEuN/b/M8C7xy+x4313Ib0QKJ48qtSY6E6wcWmY8hKLxQiHw8RiMe2l5vV6cbvdRCIR7WWxmOPHjzMxMaH97fF46O3t3djKywMPKJFGG+xhURUEn8+nXb9w3pG4e9ESwIEy/jnVbO/xeIoce6vZp/D/DoejKnlKEQgEGBkZIRKJEIlEato3HA5js9nK9sXVnmM1basXXW0dIOk4L18ita20D0yp/EWy0Yxu1w4RXSRYcza3dZFIzTObnsNov0L2gsRltrLtXdsw//zM1YlhfolWvnSJ93z0QW3/6//xOfi3HwEgh4QOmZykQyeL6KJWoqGWl9HRUSKRiDbbVF8S5cJvI5FIkbJTSCgU2nCRJiVRZwcbJBRvcHAQt9uNz+fTrr/L5WJsbGzJtrky7S23vUqpl/9y+9QjisntduNwOPD5fHg8HtyllMd1OsdatK1auqztmPTX8Zu3LgLxZUP+52/YR9uWdmWGK2pyCdYYSZLY1bWViegFUpkk+s5ZPtR1kt3bu3n68e+h+8RDRVbuQqVbAmXJ/g/+AFCWi76y5xG+uP3bcP6ciC5qIRqmvCzONwJozpDlZrrlZqc2m62swpNKpUgVJNyK550RM5kMmUym5D6ZTAZZlsnlcmVf2KWQ8w+Lum8RW7Ygbd8Oe/Ygf+ITSI8+CufOIW/ZsmZKjNq+wrbdfvvtHDx4kGPHjnHq1Cluv/12wuEw0Wh0iQIhF/jlqMeotL26z+LrUes+y/1bar9IJEI4HCab999Qty08bqm2Fd7DvXv3EovFeO2117Q+WXjuas5Rqq3V7NdISvVTs8HADTu3kZ2aJIWBSWMnW9IxrHJ6iTJjaZPIqbmKVDaAIq5S8TlsEVpdxuXk00s6brDtIHLpHLJugV0PfosX/+EuvvRmL1/88XO0v/8DS/bRFPH8sTPo+ZTpv/CZ73+chX1/dDW6qMx7od6o759y76FmpxHy1XKsNQ2VPn78OH6/v+ZZt8PhIKpmCS1xzIcffnjJ98888wxtbW0l9zEYDOzYsYPZ2VnS6XRNbQGYmZlZ+mVnJ/zsZ0r4tCTBxz9+9WGJLzXVN4JkMoksy5oCp/KlL32Ju+66i5/85CfcdtttPPjgg3zsYx/jb/7mb9i7dy9PPvkkr7/+Op/5zGcA5WWrHmPLli0Vt89ms6RSqaJz1rpPIp9vRP17+/btRCIRzp07x4svvsjevXvZu3dv0X7G/DLGN77xDR566CGefPJJwuEwH/nIR7TjlGqbyszMDDfeeCO33XYbfX19PPnkk0xNTfHJT35Sa0s15yjVVrvdvux+a0HJfrr3ehZyenIJIxfmHNww9xoLeiPpjk7a5mLoFhaYmZtDboIBt6R8LUary7icfDZ9G5ckPTpLkp0f/h7f53u8dirKSSAngU6GHKXDZt/DTzn4Z1kmJp6iwBNhzank49kK1FO+ubnqC1tVrbwEg0FOqmHAFRgYGCjplOjz+Th8+HCRI2O1lFNc1PN97nOf0/6Ox+Ps2bOHe++9l87O0nVZkskk586dY9OmTVgqJZxbhCzLzMzM0NHRsSFTyFssFiRJWiL3Bz/4QQ4ePMhf/uVf8swzz/Doo4/yhS98gbvvvhtQlji+9rWv0dHRAYBOpys6RrntOzs70ev1mM3mJeesZZ/2fBSL+vcHP/hBXC4Xt99+O263m0ceeWTJfp2dnXz+85/ns5/9LA8//DD9/f0cPHiQ7du3a8cp1bbF9/BHP/oRhw4dYu/evRw8eJBPfepTfP3rX6/6HKXaumfPnmX3ayTV9FPNzUjej0mSUFLOXQeyTMcG7NuFbPTnsB60uozVymcymdi6yc7/9N5+Bp96hlnDG0Q3WbjSYWVu+xakP/5jrnv8JLz8MrIkIcmylkX3M3+W5fe/ejtw+1qJVUQmk2FkZITe3l5t
ItRKNEK+WiZ3kly4VtAggsEg0Wh0WcUlEonQ09PD4iZJksTIyEhVPi/xeJyuri6mp6crKi8TExN0d3fXpLyoFonOzk50LegD0OrygZCxFWh1+aD1ZaxWvsKx2mSy4P30Av/lu/Po54zcdKuZB/5Aosd0jg996Q5eTV2PP/vHfJJvc0vHOdp/eUappL5OZDIZnnrqKe67776WVV7qLV8172+Vhi8bqX4uquISi8WIRqMl/V6cTic2m61kNFJTOOsKBAKBoCHodBB4xMB739PBZz4Dv3gJBgYA9mDiDdKY6OmR+HjgKO3vF9lzW52GqvThcJhwOIzL5dLCRQOBgBYaG4lEtLwuKgMDA0VZToPB4IqWmgQCgUDQWkiSUmPxzTfhG9+Aw4eVUnIPHjHzxBMSr7wCd98jsudeCzQ0z8vBgwe1bKqFqBlzQ6EQfr+/KIPusWPHGBoa0hKFnTlzZmPneBEIBALBmmK3w5/+qfIRXJusaaj0Yo4ePVrSqlKozPT19dW9bQKBQCAQCJqX1vMEEwgEAoFA0NII5UUgEAgEAkFTcU0rL2sQJS4QCASCFSLGaEE5rknlRY1JryWbn0AgEAjWlkQigSRJLZknRbA61rQ8wEZBr9djs9m4dOkSAG1tbVVlsczlcqTTaZLJZMsmjmpl+UDI2Aq0unzQ+jJWkk+WZRYWFojH48TjcWw2G3q9fp1aKtioXJPKC8COHTsANAWmGmRZZn5+HqvV2rIpu1tZPhAytgKtLh+0vozVyKfX69m5cyddXV1r3DpBM3DNKi+SJLFz5062bdtWdSXLTCbD888/z5133tmSZsxWlw+EjK1Aq8sHrS/jcvIZDAb0en1LKm6C+nDNKi8qer2+apOkXq9nYWEBi8XSkgNKq8sHQsZWoNXlg9aXsdXlEzSe1ltMFQgEAoFA0NII5UUgEAgEAkFTIZQXgUAgEAgETYVQXgQCgUAgEDQVQnkRCAQCgUDQVLRctJGaTjoej9f92JlMhrm5OeLxeEt6yLe6fCBkbAVaXT5ofRlbXT5ofRkbIZ/63q6mLETLKS8zMzMA7NmzZ51bIhAIBAKBoFZmZmaWTU4oyS1W+SqXy3HhwgU6OjrqnuAoHo+zZ88ezp07R2dnZ12PvRFodflAyNgKtLp80Poytrp80PoyNkI+WZaZmZlh165dy5bFaDnLi06nY/fu3Q09R2dnZ0t2RpVWlw+EjK1Aq8sHrS9jq8sHrS9jveWrthyEcNgVCAQCgUDQVAjlRSAQCAQCQVMhlJcaMJvNfPnLX8ZsNq93UxpCq8sHQsZWoNXlg9aXsdXlg9aXcb3lazmHXYFAIBAIBK2NsLwIBAKBQCBoKoTyIhAIBAKBoKkQyotAIBAIBIKmouXyvDSKoaEhbDYbALFYjGPHjq1vg+rM0NAQAOPj4wD4/f71bE7D6e3tZWRkZL2b0RB8Ph89PT0AOBwO+vr61rlF9SMQCBCLxbDZbIyPjzMwMKA9l81ILBbj1KlTDA8Pl+yPzT7uVCMfNPe4s5yMhTTjuFONfOsx5gjlpQrUB+zo0aMAhEIhvF5vUz5opfD5fAwODmp/e73epnzIqiUYDBIKhda7GXUnFotx8OBBTp8+jc1mIxwO43a7q6oT0gwMDQ1x9OjRopf5kSNHGB4eXt+GrZBwOMzo6CixWIxoNLrk92Yfd5aTrxXGneVkLKQZx53l5FvXMUcWLIvNZpOnpqaKvmuVSzc1NSV7PJ4i+cbGxmRAHh8fX7+GNYipqSnZ7/e3zP0r5OjRo/Lg4GDRdyMjI+vUmvrj8Xiq+q7ZGB4ell0u15LvW2XcKSVfq4075e6hSrOPO+XkW88xR/i8LEMkEtHM1ItpNi26HKOjo0QiEe1vp9MJKFp1q3Hq1CkOHTq03s1oCIFAgL6+PiKRiNY3PR7POreqfthsNnp7e7V+GYlEtL7aaohxp7Vo1XFnPcccobwsQ+HDVYjNZmuJh8xmszE1NYXL5dK+Uzthq70YQqFQS73MC1H7aTgcJhaL4XQ68Xq9LfOiAzhx4gSRSAS73Y7P5yMUCjXNEkqtiHGndWjVcWe9xxzh87JCHA7Hsmuczcrx48fx+/1N7QhZCvUBa4XBfzHqQGKz2bQXwuDgIN3d3UxNTa1n0+qGzWbD5/MxMjLC0NAQHo+HQ4cOtVw/rYQYd5qPVh131nvMEZaXFdKqA4jP5+Pw4cOak2CroJo3W50DBw5o/1dn6a1iffH5fDidToaHhxkfHycajeJ2u9e7WWuKGHeai2th3FmvMUcoL8tQzoSpatOtRDAYpKenp+nCMZcjHA4XPWCtSLm+aLPZyi5BNBOqD4hqfnc6nYyNjWGz2QgGg+vcuvojxp3mp9XHnfUec8Sy0TI4nU7tZiy+Wa20jqlqyurMRw2Na4WBMhqNEg6HNRnVnBJDQ0M4nc6WmBk5nU6cTieRSKTIjyAWi7XEABqJREouJ3i93rVvzBogxh0x7mx01nvMEcpLFQwMDBAKhbQHLBgMtpR5MxwOEw6HNa9xaC0ZPR5P0YAfDocJBAItN9MbHBzk5MmT2kASDAbxeDxFA0uz4vF4GBwcXBKBMzY21vROu+WWglpl3CknXyuNO6VkbKVxp9w9XM8xR1SVrhJVWwY4c+ZMUXKlZiYWi9Hd3V3SmawVu0YwGOTkyZMEg0GOHTtGb29vS81k1Qy0AJOTky3TT0Hpq8ePH2fz5s3a2nph0rpmIxKJaP0xHA5z7Ngx7rjjjqIZeTOPO5Xka5Vxp5p7CM077lQj33qNOUJ5EQgEAoFA0FQIh12BQCAQCARNhVBeBAKBQCAQNBVCeREIBAKBQNBUCOVFIBAIBAJBUyGUF4FAIBAIBE2FUF4EAoFAIBA0FUJ5EQgEAoFA0FQI5UUgEAgEAkFTIZQXgUAgEAgETYVQXgQCgUAgEDQVQnkRCAQCgUDQVAjlRSAQCAQCQVPx/wOR5DkG+Qcx3gAAAABJRU5ErkJggg==\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAi8AAAGeCAYAAABcquEJAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAACqcklEQVR4nOy9e3wb13Xg/x2AIEhKJEGKoiiJskhQtiVLsmRAsh3HcRyZdFInadJUtNqoqb1NTG5227TJNmK13Ufd3VYrtru/tum2Jh23rqw6enDTuEnUxKQjP2IntEjI1st6gqTeEl/g+4HH/P6YGQgkARIgAQwe9/v58DPgnde5M3funDn33HMkWZZlBAKBQCAQCJIEg94CCAQCgUAgEESCUF4EAoFAIBAkFUJ5EQgEAoFAkFQI5UUgEAgEAkFSIZQXgUAgEAgESYVQXgQCgUAgECQVQnkRCAQCgUCQVAjlRSAQCAQCQVKRobcA0cbn83H9+nVyc3ORJElvcQQCgUAgEISBLMsMDQ2xYsUKDIbZbSspp7xcv36dVatW6S2GQCAQCASCeXDlyhVKS0tn3SbllJfc3FxAqXxeXl5Uj+12u3n99dd58sknMZlMUT12IpDq9QNRx1Qg1esHqV/HVK8fpH4dY1G/wcFBVq1a5X+Pz0bKKS/aUFFeXl5MlJecnBzy8vJStjGmcv1A1DEVSPX6QerXMdXrB6lfx1jWLxyXD+GwKxAIBAKBIKkQyotAIBAIBIKkQigvAoFAIBAIkgqhvAgEAoFAIEgqhPIiEAgEAoEgqRDKi0AgEAgEgqRCKC8CgUAgEAiSCqG8CAQCgUAgSCriEqSuvr4egEuXLgHQ0NAQ1j4WiwUAl8vFrl27YiafQCAQCASC5CHmyktdXR179+71/19bW0tVVRXNzc0h99GUnZqaGgBaWlqora0NS+kRCAQCgUCQ2sR02MjlcuFwOHC5XP6y2tpaWlpacDqdIffbs2ePX3EBqKyspLGxMZaiCgQCgUAgCKStDbZtU5bB/teRmPu8tLW1TVFUrFYrwBSFJhCn04nL5fIPGQXS0tISCxEFAoEguiRwpy8QhM2+fXD0KLzySvD/dSSmw0YWi4X+/v4pZZoCoikx0wllkbFYLEEVnomJCSYmJvz/Dw4OAkrSKLfbPR+xQ6IdL9rHTRRSvX4g6pgKJEP9DC+/jPHoUbz/9E/4Nm2a8f9cJEMdF0Kq1w+SuI5dXdDbC5JExoEDSIC8bx8eu52MV/Yr/7/6PTxPP03+xYt4Ll2CioqonDqSayXJsixH5axhYrfbqa2tnTIsFEhLSwtVVVVMF6uiooK6uroZ+/3Jn/wJzz///IzjvPrqq+Tk5ERPcIFAIJiF7Nu3yRwcBEniY3/6p5gHBphcvJgTzz3H/S++SObwMBP5+fziv/03kGUm8/IYKy7WW2yBYApf+OIX/b9lIHh+Z0ldq/DaD34QlXOPjo7y5S9/mYGBAfLy8mbdNq7KS11dHUuWLJl15lAo5aWgoIC9e/fOUF6CWV5WrVpFT0/PnJWPFLfbTXNzM1VVVSmb4jyV6weijqlAotbPlJnp/y1LElKQrlVGQgro9H2PP45vzx5ku33Kdolax2iR6vWD5Kyj1N6O4Xd+B+n8eSSvd87tfUYj7sZGDF/5SlTOPzg4SFFRUVjKS1ymSgM0NTVRUVER0uKiEWo4yeVyBV1nNpsxm80zyk0mU8waTCyPnQgkVP3a2mDXLqivhy1bonbYhKpjjEj1OiZc/fbvh2efBY8nqOIC3FFcDBnwxDYMza9j+N734OGHg26fcHWMMqleP0iyOr76Knz0EUfyf4OnBg7MufmP/+tf8ZmvfCVq9YvkOHEJUqf5uWiKi8vlCunbYrVasVgsQddXVlbGTkhBYjDdsTGBHMQEgpC0tcFLL3H1f/xjeNv7Xoa3P1B+HzgADge0tyv+BgJBPOnqUtqew4H3ewcBeGjgp+pKacpSlpSlV1UdFi3Sz58n5sqLw+HA4XBgs9lwOp04nU4aGxspLCwEFAddLa6Lxu7du6fMLGpqaprTYiNIETRl5X//b6VDP6g8TKKDFyQ0arv96L/+WC1QulZ5Wqd/h9+CiW7lZ3c32O2KZbGsLC7iCgR+ysqUtme3Y+hR2mQh2kQb1VK46X4wGJDuvx9eeAHvJjvuomW4C6LrmhEJMY/z8sQTT1BXV0dFRYX/r66uzj8VuqWlZUbwuV27duFyuWhqaqKpqYljx46JAHWpTIDmP0VZsduRb98GQL49rYMXU04FehP4xfqq0m63eZqBJfiy1sJ/+TOkaZ3+6HJrgMeL+ksbYsrIUIaeBIJ4sn+/0va4M6zpV7Ulo7L++HHo71eWtbVkHm+FjouMFxXpIzM6TJWeTk1NTVCrSqBT7/bt26MumyCBCPjaVBwa73DHaKk8VH5HyFdeiaoPjEAQMQHt1qC2VCN9gIxhvBf+5x+DzwdDQ5CbC5JETk0NF/65lbu/8rGZx2ttBZstPrILBBo7d3LCs477n7XPXHfsfbCrbTLQgVaSIIivaTwRiRkF+tLWBuvWIRuNAFNmYgTDI1sAkMUwkkBvgnyx+q0pmhVFkpROXxs2kiTuvk+ZleT1d7/q8t34iC0QBOLzwV+onhvy9DYZfJ50QiCUF4G+7NsHH33ExYzq4Oun+QqYtLHY6cNIAkG82bmTs//UGnxdayvs3Bl8XXExlJRwa6WdWl7gNnagBJ6/AY+JKLyC+HLgABw9U8wtSsBkB16AlXYoKVHaaoIilBdB/AnwFZBVH5e7JxTvds3B0a+0rFsH/+7fzTiENP0LVyCIM7IMf/mX2n9qV2oIo0stLYXOTkq6Wjn7WC1ltHIjuxN6fwrviJl1gvjhdsMf/zFco5QPPtuJ5G6F0lq40AqdnUpbTVCE8iKIPwHe7dxWZ1yoFhW/UrJlCyxbBj/5CfzDP4T8Gj3RMMsXrkAQQ/7t3+Dc8WJkSgA7fPMFpU2H88VqNmMwSrz037rYmOHgT8ZOA6qz+v4DcPw4+RcviiFRQUx5/c/beKlzG5+2tFF1ygxIsAvI1t+nZS6E8iKIP0F9BVSMqnd7a6vSca9apZRrlhiDNgVVWX7w+zD8pkh6J4gvvvfbKHp6G3/ETSQ64TOt8H9qlXYbwRfrmsoyWj1baMCOjKrI93VjeughHv/DP8R0990xq4MgvfH5oP9v9rGNo/zd0lcwdAFLga/qLVl4COVFEH927lQ6+WC8/76yfro3u+ongN0OL7yAb7XiJ/Dl4WLOPicC2Qniy+X/uY8HR47yK7wCmOF/BAx3RvLFun8/cginX5/RiOfll6Mms0AA+Ift3/2Og6o+xdpX3nEAcMCvt0N3clj74pYeQCAI5NTbsAFQ9GefYlHx+ULvoPoJkJkJkoTxyU/T++s3WHL8NpsuBsSGeeYZxRmhqAhWr455PQRpRFcX9PSAJJH/E6XNGTgAW58BSYauebS5nTuR1q1TlPJpvL23no9/+cvRkFwguIM6weETgE/1MZQ83YAdXkD5i2++5nkhlBeBLlzbU8wGSpgwrsL8na
/CP74EV67M7isQ+EVrLWeJ+tOkRTHt7kYKfAkkwQMoSCICZrXl++eQdsMxO2ghhxbQ5rwYMOJDxoCEj4LzBfM+lkAQkv37kZ99FsnjwRBsin+SWPvEsJEg7rz/ffjU7VKgk8EDrfD1yH0FAv1mtAdPEpFKBbEkoM0F7fTn2+bUIdGeu5Sp0z0GZUh01dEgQcMEgoWycyd/99vzmOKfYAjlRRBf2too37mNTNo4t9zM0u3z9BWYzW8miR5AQRKxcydDLTFoc+qQaN5Hrby2rJZNvla8hg4KL9yP1JbAUcIESYnPBz/6kfJ7RlC6JCL5JBYkNf1797F0/CjwCov3ROeYsjR1BpJAECt+8hPtlxaBNEptzmwmO0fi934PbiDxbxYz0IbxC0+KWXSCqPL223DydjG3pBLIU4PSrUj8oHTTEb29IPYEBKXL+YHi6OiRDrBywwLD+6vmdsluZ6DgBSTsjEgleAqT5wEUJBf/8KNiRrW4Lg+/AFui2+nX1CgGyD/uk4B9SN1H4e/FLDpB9Ni/XwlK9793dCINtwK1cDTxg9JNRzjsCmJPgKNjppbATu5WOn6N+Tg6BsxAymqS4OkaPPIk//pvZn7z6wsTWSCYztmz8LNTpUzQSQ6Z8GcSfKoGJiejFtBr6WgX//nTPfzgXyXc0gFMMnDwAPxHMYtOsHDGx+HwYeX3cyVm8AEPAvdIQGIHpZuOsLwIYk/QlOtRcq41m0GSMP863F4mkY8Z6b+0IYugdYIo873vwVNAAWYoleBxop9dt6yM//avW3BgJ0PuUcpGRB4vQXRobobBQeW7b80JtfBpXUWaN0J5EcSeeDjXGmDR88rPz/XtQxJB6wRRRJbh1Vfht7SCLxOb3jOcTNUCwTz5f/9PWX7lV0B6Sy38km7iLAihvAjiwrVT2q8IEthFQlcXiza1M5TlYDEBQescC/SrEQhQmtDNi/BZrSBWsePELDpBjPD8so3f+edt2Gnj2QLACzwAlOss2DwRPi+CuHD+H4tZSQnjhlVk/d1X4aUwgtJFgmpOzwXQgtbdFkHrBNHh+99XhoyyANYA98f+nFrQOn8U6t7Yn1OQulzfu4/HPEepzX6Fu0+pURWT1OoCwvIiiAOyDEW/UILSndzZCrXzCEo3F8GC1glzuyAatLXxq3+9ja+h+lBtB2IZfqW4GHnZMs4t2kgtLzCRoQSt46iYRSeIkICZnpafKhbpHfIBpBYH0A5bk9ciLSwvgphz5l9h4wR4MHPfn6qF0XZ03LkTQuSIobUVbLbonUuQVvT/9T4eHj3KJK8AW+DXY3zC0lI8Fy/yvf91gsY/e4SszBr+2jMJTWb4M2KrOAlSiwAH78Vqw8kdV/MYAXyGpLVIC8uLIOZc/StleboYFpXF/nwiaJ1gwQR8sWb+i/LFmskBKFG/WGPtQ2U284CthxUrZF4alfBkmuEC4IjtaQUpRpCUFn6LtJTcFmnRuwtiiizDyl8ovyd+NcYnCwhaN5apBK0bNiVX1EhBglBWpkxNttvJGelWC7vhph22xmfKstEo8+Uv+xgB3l+qFh6I+WkFqcRsDuD/J7kdwIXyIogp538CGybAA6z7zzE+mRa07v1Whn6rFmjlHXcn3ebkiRopSBASZMpydbUPgO/cVgsO3RFDIIgE7/Q8Rlt1EyUqCOVFEDva2lj6W9uANk4VQW48puSpQeuK6wAknsTM9/82DucVpBYJMmV582awWuEHbvBktsHlbfCPIviiIHyGc4q5SQnt2Bn+7AuAHTJLYHVyW6SF8iKIHfv2UdinJGEc+lScz30P3KgAIzDaEOdzC1IKn46ZdyUJtm+HceBW5j7gKHxHBF8UhM+bF0tZTSdftray2KBYpPnvnUmVxygYQnkRRJcAR0ffq2qwOA5wz6fjHywu7/eV5RdvtTHykEgXIIiQ4mJuG0sYQc28W6ZD5t2uLp7Z0M4DOFg0rD5PJw7AMRF8URAezc0wiZlPPyHBmwASfCa58hgFQ0yVFkSXAEdGSQsWRzfLvhb/YHGLngH3H0C5bx+8r6YL2LQpLucWJD+XJkp5yNvJTTIBCY7WwPLoJWEMB9Pdd3MfyiQjnzZH2tcND4rgi4LwaGlRltWrgSFgCbBZP3mihbC8CKJLLJMwRkJXF1xop/duB6jpAuQDB+D4cfIvXhRfrII5+elP4WHMZCApUXXLohybKAw8L788Y6qryHUkCJdr1+DMGWX48aEhtfAJUuLNLywvguiSKMHiVAtQCeCP6nW7G9NDD/E4wB/+ofhiFczK669DpfbPE/rIIH/5y7BxY/Dn6RetsEUEXxSE5mc/U5Z2O2S/rxbG2/8wRqSA/iVIXNRgcdFOwhgOs6QL8BmNyhetQBACt1vp+P3KS+VsW8cHbaqrP/jiWR2FESQFb7+tLCsfA36pFj6mlzTRRSgvguhTXMxkZglg51LeC0pyxHg7Os4y1fXt+nrli1YgCMEvfwk5Q7ABkCX0/VpVgy9eLbZTywuMZqq5jtqSe6qrIPZoystnlwNjKP4u63QUKIrEfNjI5XJx6NAhDh8+THNz85zbt7S00NDQQFVVFVarlebmZrZu3cr27dtjLaogWpSWct7cyYbJTK58VqLin2tgMr6OjoHIkgFJ9iFjQMKniwyC5KKl5c5IkfQASqevF2rwxXNvZtL4GYmsrBr+enIS3kr+GSOC2HHrFpw/r/i72EbUwk+QMrmxYmp5cTgcHDp0CJfLRV9fX1j7uFwuWlpaqK2tpba2loqKCqG4JBmDV2DtkBmQsH6d6CdhDBctXcBmO16UdAEj2cuYyM+PvyyCpOKddxJryAizmU8+LrFoEbw6KCFLZvgAuKK3YIJE5ec/V5YbNkCOFiXiE7qJE3ViqrzYbDZqamqwWq0R7dfR0YEsy1y6dImampoYSSeIFWdfVEx6HSa4S8+HRUsX0N7KzQeU4Ez/tKiT8aIiHYUSJDqTk/DLXwT46OrkrDsdsxkqK6EHuKrFF/uhnhIJEpmOw228wTZ+6942UBUZobwIBLMw9m/K8kpkOmtsUNMFWL4KIPFwTxa3buXoLZUggTl+HErH4S5AzgQe1VuiO3z2s8ryNW2inFBeBCFY+bN9bOMoT994BVzAIuABnYWKIgk5VfrQoUMUFhbS19fHpUuX2Lt3b8htJyYmmJiY8P8/ODgIgNvtxu12R1Uu7XjRPm6iEK36FZ/JACTkT3pxuxPDxyTz18D3uxnYkPjemytx/1Zq3kMQ7XShnN3v4IfsBuqRP2bDY/JCnC9lqDpWVQGY+PurMr+LhPwzGU+/BxbHV76FkuptFHSqY1cX9PYyMiKxrVuJb3XXBweAZ/Ct9+LtLIDVq6NyqljUL5JjJZzyYlPjgGhDTY2NjVRXV3P48OGg2+/Zs4fnn39+Rvnrr79OTk5svrDDcTxOZhZSP09vFl8cfRKAa2ve4ciRgWiJtWDWL61iTXcOee+Upvw9BNFO50vW4X/lXpScXOeXL+bckXMxOU84BKuj1fpJzjgt9ORNUDRo5nj9cW48fEMH6RZOqrdRiG8dv/DFLwJg4
[... base64-encoded PNG payload of the notebook plot output omitted ...]",
      "text/plain": [
       "<Figure>"
" ] @@ -39759,17 +5727,17 @@ } ], "source": [ - "mae_compl_3_5 = np.mean(np.abs((pred_u_compl_3_5 - x_test)/x_test))\n", + "mae_compl_4 = np.mean(np.abs((pred_u_compl_4 - x_test)/x_test))\n", "mae_compl_5 = np.mean(np.abs((pred_u_compl_5 - x_test)/x_test))\n", - "mae_compl_7 = np.mean(np.abs((pred_u_compl_7 - x_test)/x_test))\n", + "mae_compl_10 = np.mean(np.abs((pred_u_compl_10 - x_test)/x_test))\n", "\n", - "print(f'MAPE on the test dataset for eq with $C = 3.5$ is {mae_compl_3_5}')\n", + "print(f'MAPE on the test dataset for eq with $C = 10$ is {mae_compl_10}')\n", "print(f'MAPE on the test dataset for eq with $C = 5$ is {mae_compl_5}')\n", - "print(f'MAPE on the test dataset for eq with $C = 7$ is {mae_compl_7}')\n", + "print(f'MAPE on the test dataset for eq with $C = 4$ is {mae_compl_4}')\n", "\n", "plt.plot(t_test, pred_u_compl_5, color = 'b', label = 'Automatic solution of the equation, $C = 5$')\n", - "plt.plot(t_test, pred_u_compl_3_5, color = 'magenta', label = 'Automatic solution of the equation, $C = 3.5$')\n", - "plt.plot(t_test, pred_u_compl_7, color = 'seagreen', label = 'Automatic solution of the equation, $C = 7$')\n", + "plt.plot(t_test, pred_u_compl_10, color = 'magenta', label = 'Automatic solution of the equation, $C = 10$')\n", + "plt.plot(t_test, pred_u_compl_4, color = 'seagreen', label = 'Automatic solution of the equation, $C = 4$')\n", "\n", "plt.plot(t_test[::3], x_test[::3], '*', color = 'r', label = 'Referential data')\n", "plt.grid()\n", @@ -39784,22 +5752,6 @@ "source": [ "Here, we can notice, that the \"correct\" governing equation, which closely matches the Van der Pol equation, has the lowest MAPE metric on the test dataset, even outperforming the equations with higher complexities. The equation with lower complexity can represent only the averaged values of the system. Thus, we can select the equation like $u'' = -u + 0.2 u' - 0.2 u^2 u' + \\delta$ for system representation." 
] - }, - { - "cell_type": "markdown", - "id": "605d8d08", - "metadata": {}, - "source": [ - "## Lotka-Volterra system" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e89c0d69", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -39818,7 +5770,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.8" + "version": "3.9.12" } }, "nbformat": 4, diff --git a/examples/PDE discovery.ipynb b/examples/PDE discovery.ipynb new file mode 100644 index 0000000..d810d85 --- /dev/null +++ b/examples/PDE discovery.ipynb @@ -0,0 +1,9737 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "427e4684", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import torch\n", + "\n", + "import sys\n", + "import os\n", + "__file__ = os.path.abspath('')\n", + "\n", + "import helpers\n", + "\n", + "sys.path.append('..')\n", + "\n", + "\n", + "import epde\n", + "from epde.preprocessing.deriv_calculators import Heatmap" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "1db0b26a", + "metadata": {}, + "outputs": [], + "source": [ + "def get_wave_inputs():\n", + " shape = 80\n", + " \n", + " print(os.path.dirname( __file__ ))\n", + " data_file = os.path.join(os.path.dirname( __file__ ), f'examples/data/wave_sln_{shape}.csv')\n", + " data = np.loadtxt(data_file, delimiter = ',').T\n", + " t = np.linspace(0, 1, shape+1); x = np.linspace(0, 1, shape+1)\n", + " grids = np.meshgrid(t, x, indexing = 'ij')\n", + " return grids, data" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "d520e5a5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "c:\\Users\\Mike\\Documents\\Work\\EPDE\n" + ] + } + ], + "source": [ + "grids, data = get_wave_inputs()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "7d8d3ac9", + "metadata": {}, + "outputs": [], + "source": [ + "train_max = 40\n", + "grids_training = (grids[0][:train_max, ...], grids[1][:train_max, ...])\n", + "grids_test = (grids[0][train_max:, ...], grids[1][train_max:, ...])\n", + "\n", + "data_training, data_test = data[:train_max, ...], data[train_max:, ...]" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "846f8788", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(41, 81)" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "grids_test[0].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "ff42165b", + "metadata": {}, + "outputs": [], + "source": [ + "def epde_discovery(grids, data, derivs, use_ann = True, multiobjective_mode = True):\n", + " dimensionality = data.ndim - 1\n", + " \n", + " epde_search_obj = epde.EpdeSearch(multiobjective_mode=multiobjective_mode, use_solver = use_ann, \n", + " dimensionality = dimensionality, boundary = 15,\n", + " coordinate_tensors = grids) \n", + " epde_search_obj.set_preprocessor(default_preprocessor_type='FD',\n", + " preprocessor_kwargs={}) \n", + " popsize = 12\n", + " if multiobjective_mode:\n", + " epde_search_obj.set_moeadd_params(population_size = popsize, \n", + " training_epochs=2)\n", + " else:\n", + " epde_search_obj.set_singleobjective_params(population_size = popsize, \n", + " training_epochs=100)\n", + " \n", + " custom_grid_tokens = epde.CacheStoredTokens(token_type = 'grid',\n", + " token_labels = ['t', 'x'],\n", + " token_tensors={'t' : grids[0], 'x' : 
+    "                                                params_ranges = {'power' : (1, 1)},\n",
+    "                                                params_equality_ranges = None) \n",
+    "    trig_tokens = epde.TrigonometricTokens(dimensionality = dimensionality)\n",
+    "    \n",
+    "    factors_max_number = {'factors_num' : [1, 2], 'probas' : [0.8, 0.2]}\n",
+    "    \n",
+    "    opt_val = 5e-1\n",
+    "    bounds = (1e-8, 1e0) if multiobjective_mode else (opt_val, opt_val) \n",
+    "    epde_search_obj.fit(data=data, variable_names=['u',], max_deriv_order=(2, 2), derivs = derivs,\n",
+    "                        equation_terms_max_number=5, data_fun_pow = 1, additional_tokens=[trig_tokens, custom_grid_tokens], \n",
+    "                        equation_factors_max_number=factors_max_number,\n",
+    "                        eq_sparsity_interval=bounds, fourier_layers=False)\n",
+    "\n",
+    "    return epde_search_obj"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "id": "b9934718",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "{'terms_number': {'optimizable': False, 'value': 5},\n",
+       " 'max_factors_in_term': {'optimizable': False,\n",
+       "  'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}},\n",
+       " ('sparsity', 'u'): {'optimizable': False, 'value': 0.5}}"
+      ]
+     },
+     "execution_count": 16,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "epde_search_obj.equations(only_print=False)[0].vals['u'].metaparameters"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "id": "5d59ad61",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "setting builder with \n",
+      "setting builder with \n",
+      "setting builder with \n",
+      "trig_token_params: VALUES = (0, 1)\n",
+      "Do changes even work?\n",
+      "Deriv orders after definition [[0], [0, 0], [1], [1, 1]]\n",
+      "initial_shape (40, 81) derivs_tensor.shape (3240, 4)\n",
+      "Size of linked labels is 5\n",
+      "initial_shape (40, 81) derivs_tensor.shape (3240, 4)\n",
+      "Using ann of operators [Linear(in_features=2, out_features=112, bias=True), Tanh(), Linear(in_features=112, out_features=112, bias=True), Tanh(), Linear(in_features=112, out_features=1, bias=True)].\n",
+      "Training NN to represent data for 1000.0 epochs\n",
+      "deriv_tensor (40, 81), indices tensor([2558, 100, 340, ..., 1389, 2065, 1386])\n",
+      "[... several hundred repeated 'deriv_tensor (40, 81), indices tensor([...])' debug lines omitted ...]\n",
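The `Using ann of operators [...]` line in the log above spells out the network EPDE trains to represent the data before derivatives are computed. As a minimal sketch, the printed operator list corresponds to the following torch module; the name `data_ann`, and the reading of the two inputs as the $(t, x)$ grid coordinates with $u$ as the single output, are our assumptions, while the layer sizes are taken verbatim from the log:

```python
import torch.nn as nn

# Restatement of the operator list printed in the log above: a small MLP,
# presumably mapping the (t, x) grid coordinates to the field value u.
data_ann = nn.Sequential(
    nn.Linear(in_features=2, out_features=112, bias=True),
    nn.Tanh(),
    nn.Linear(in_features=112, out_features=112, bias=True),
    nn.Tanh(),
    nn.Linear(in_features=112, out_features=1, bias=True),
)
```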
indices tensor([1091, 326, 2185, ..., 916, 1244, 2145])\n", + "deriv_tensor (40, 81), indices tensor([1091, 326, 2185, ..., 916, 1244, 2145])\n", + "deriv_tensor (40, 81), indices tensor([1091, 326, 2185, ..., 916, 1244, 2145])\n", + "deriv_tensor (40, 81), indices tensor([1091, 326, 2185, ..., 916, 1244, 2145])\n", + "deriv_tensor (40, 81), indices tensor([ 859, 1671, 2660, ..., 1506, 2864, 2801])\n", + "deriv_tensor (40, 81), indices tensor([ 859, 1671, 2660, ..., 1506, 2864, 2801])\n", + "deriv_tensor (40, 81), indices tensor([ 859, 1671, 2660, ..., 1506, 2864, 2801])\n", + "deriv_tensor (40, 81), indices tensor([ 859, 1671, 2660, ..., 1506, 2864, 2801])\n", + "deriv_tensor (40, 81), indices tensor([ 199, 44, 3238, ..., 475, 2999, 232])\n", + "deriv_tensor (40, 81), indices tensor([ 199, 44, 3238, ..., 475, 2999, 232])\n", + "deriv_tensor (40, 81), indices tensor([ 199, 44, 3238, ..., 475, 2999, 232])\n", + "deriv_tensor (40, 81), indices tensor([ 199, 44, 3238, ..., 475, 2999, 232])\n", + "deriv_tensor (40, 81), indices tensor([2438, 2295, 2627, ..., 873, 749, 412])\n", + "deriv_tensor (40, 81), indices tensor([2438, 2295, 2627, ..., 873, 749, 412])\n", + "deriv_tensor (40, 81), indices tensor([2438, 2295, 2627, ..., 873, 749, 412])\n", + "deriv_tensor (40, 81), indices tensor([2438, 2295, 2627, ..., 873, 749, 412])\n", + "deriv_tensor (40, 81), indices tensor([ 957, 1531, 3008, ..., 3168, 1476, 189])\n", + "deriv_tensor (40, 81), indices tensor([ 957, 1531, 3008, ..., 3168, 1476, 189])\n", + "deriv_tensor (40, 81), indices tensor([ 957, 1531, 3008, ..., 3168, 1476, 189])\n", + "deriv_tensor (40, 81), indices tensor([ 957, 1531, 3008, ..., 3168, 1476, 189])\n", + "deriv_tensor (40, 81), indices tensor([ 313, 1509, 59, ..., 711, 1727, 2331])\n", + "deriv_tensor (40, 81), indices tensor([ 313, 1509, 59, ..., 711, 1727, 2331])\n", + "deriv_tensor (40, 81), indices tensor([ 313, 1509, 59, ..., 711, 1727, 2331])\n", + "deriv_tensor (40, 81), indices tensor([ 313, 1509, 59, ..., 711, 1727, 2331])\n", + "deriv_tensor (40, 81), indices tensor([2215, 1795, 2103, ..., 2726, 1742, 1774])\n", + "deriv_tensor (40, 81), indices tensor([2215, 1795, 2103, ..., 2726, 1742, 1774])\n", + "deriv_tensor (40, 81), indices tensor([2215, 1795, 2103, ..., 2726, 1742, 1774])\n", + "deriv_tensor (40, 81), indices tensor([2215, 1795, 2103, ..., 2726, 1742, 1774])\n", + "deriv_tensor (40, 81), indices tensor([ 548, 2106, 1329, ..., 581, 1955, 426])\n", + "deriv_tensor (40, 81), indices tensor([ 548, 2106, 1329, ..., 581, 1955, 426])\n", + "deriv_tensor (40, 81), indices tensor([ 548, 2106, 1329, ..., 581, 1955, 426])\n", + "deriv_tensor (40, 81), indices tensor([ 548, 2106, 1329, ..., 581, 1955, 426])\n", + "deriv_tensor (40, 81), indices tensor([1650, 2803, 351, ..., 2634, 3227, 2764])\n", + "deriv_tensor (40, 81), indices tensor([1650, 2803, 351, ..., 2634, 3227, 2764])\n", + "deriv_tensor (40, 81), indices tensor([1650, 2803, 351, ..., 2634, 3227, 2764])\n", + "deriv_tensor (40, 81), indices tensor([1650, 2803, 351, ..., 2634, 3227, 2764])\n", + "deriv_tensor (40, 81), indices tensor([ 326, 347, 1122, ..., 83, 1286, 962])\n", + "deriv_tensor (40, 81), indices tensor([ 326, 347, 1122, ..., 83, 1286, 962])\n", + "deriv_tensor (40, 81), indices tensor([ 326, 347, 1122, ..., 83, 1286, 962])\n", + "deriv_tensor (40, 81), indices tensor([ 326, 347, 1122, ..., 83, 1286, 962])\n", + "deriv_tensor (40, 81), indices tensor([2856, 1037, 1984, ..., 449, 1175, 1532])\n", + "deriv_tensor (40, 81), indices tensor([2856, 1037, 
1984, ..., 449, 1175, 1532])\n", + "deriv_tensor (40, 81), indices tensor([2856, 1037, 1984, ..., 449, 1175, 1532])\n", + "deriv_tensor (40, 81), indices tensor([2856, 1037, 1984, ..., 449, 1175, 1532])\n", + "deriv_tensor (40, 81), indices tensor([ 480, 1153, 2949, ..., 174, 2006, 2603])\n", + "deriv_tensor (40, 81), indices tensor([ 480, 1153, 2949, ..., 174, 2006, 2603])\n", + "deriv_tensor (40, 81), indices tensor([ 480, 1153, 2949, ..., 174, 2006, 2603])\n", + "deriv_tensor (40, 81), indices tensor([ 480, 1153, 2949, ..., 174, 2006, 2603])\n", + "deriv_tensor (40, 81), indices tensor([2773, 412, 3137, ..., 1027, 729, 465])\n", + "deriv_tensor (40, 81), indices tensor([2773, 412, 3137, ..., 1027, 729, 465])\n", + "deriv_tensor (40, 81), indices tensor([2773, 412, 3137, ..., 1027, 729, 465])\n", + "deriv_tensor (40, 81), indices tensor([2773, 412, 3137, ..., 1027, 729, 465])\n", + "deriv_tensor (40, 81), indices tensor([1830, 1364, 1061, ..., 1084, 396, 2405])\n", + "deriv_tensor (40, 81), indices tensor([1830, 1364, 1061, ..., 1084, 396, 2405])\n", + "deriv_tensor (40, 81), indices tensor([1830, 1364, 1061, ..., 1084, 396, 2405])\n", + "deriv_tensor (40, 81), indices tensor([1830, 1364, 1061, ..., 1084, 396, 2405])\n", + "deriv_tensor (40, 81), indices tensor([2355, 2394, 105, ..., 548, 1846, 2130])\n", + "deriv_tensor (40, 81), indices tensor([2355, 2394, 105, ..., 548, 1846, 2130])\n", + "deriv_tensor (40, 81), indices tensor([2355, 2394, 105, ..., 548, 1846, 2130])\n", + "deriv_tensor (40, 81), indices tensor([2355, 2394, 105, ..., 548, 1846, 2130])\n", + "deriv_tensor (40, 81), indices tensor([2307, 1257, 672, ..., 1148, 522, 803])\n", + "deriv_tensor (40, 81), indices tensor([2307, 1257, 672, ..., 1148, 522, 803])\n", + "deriv_tensor (40, 81), indices tensor([2307, 1257, 672, ..., 1148, 522, 803])\n", + "deriv_tensor (40, 81), indices tensor([2307, 1257, 672, ..., 1148, 522, 803])\n", + "deriv_tensor (40, 81), indices tensor([3184, 797, 2187, ..., 3049, 2645, 588])\n", + "deriv_tensor (40, 81), indices tensor([3184, 797, 2187, ..., 3049, 2645, 588])\n", + "deriv_tensor (40, 81), indices tensor([3184, 797, 2187, ..., 3049, 2645, 588])\n", + "deriv_tensor (40, 81), indices tensor([3184, 797, 2187, ..., 3049, 2645, 588])\n", + "deriv_tensor (40, 81), indices tensor([2498, 31, 2437, ..., 970, 2411, 356])\n", + "deriv_tensor (40, 81), indices tensor([2498, 31, 2437, ..., 970, 2411, 356])\n", + "deriv_tensor (40, 81), indices tensor([2498, 31, 2437, ..., 970, 2411, 356])\n", + "deriv_tensor (40, 81), indices tensor([2498, 31, 2437, ..., 970, 2411, 356])\n", + "deriv_tensor (40, 81), indices tensor([2998, 2099, 1220, ..., 2935, 1129, 3163])\n", + "deriv_tensor (40, 81), indices tensor([2998, 2099, 1220, ..., 2935, 1129, 3163])\n", + "deriv_tensor (40, 81), indices tensor([2998, 2099, 1220, ..., 2935, 1129, 3163])\n", + "deriv_tensor (40, 81), indices tensor([2998, 2099, 1220, ..., 2935, 1129, 3163])\n", + "deriv_tensor (40, 81), indices tensor([2523, 350, 1667, ..., 454, 731, 354])\n", + "deriv_tensor (40, 81), indices tensor([2523, 350, 1667, ..., 454, 731, 354])\n", + "deriv_tensor (40, 81), indices tensor([2523, 350, 1667, ..., 454, 731, 354])\n", + "deriv_tensor (40, 81), indices tensor([2523, 350, 1667, ..., 454, 731, 354])\n", + "deriv_tensor (40, 81), indices tensor([2159, 539, 190, ..., 329, 2450, 1646])\n", + "deriv_tensor (40, 81), indices tensor([2159, 539, 190, ..., 329, 2450, 1646])\n", + "deriv_tensor (40, 81), indices tensor([2159, 539, 190, ..., 329, 2450, 1646])\n", + 
"deriv_tensor (40, 81), indices tensor([2159, 539, 190, ..., 329, 2450, 1646])\n", + "deriv_tensor (40, 81), indices tensor([1378, 3084, 46, ..., 1044, 1029, 2265])\n", + "deriv_tensor (40, 81), indices tensor([1378, 3084, 46, ..., 1044, 1029, 2265])\n", + "deriv_tensor (40, 81), indices tensor([1378, 3084, 46, ..., 1044, 1029, 2265])\n", + "deriv_tensor (40, 81), indices tensor([1378, 3084, 46, ..., 1044, 1029, 2265])\n", + "deriv_tensor (40, 81), indices tensor([2289, 1775, 1249, ..., 1062, 538, 1473])\n", + "deriv_tensor (40, 81), indices tensor([2289, 1775, 1249, ..., 1062, 538, 1473])\n", + "deriv_tensor (40, 81), indices tensor([2289, 1775, 1249, ..., 1062, 538, 1473])\n", + "deriv_tensor (40, 81), indices tensor([2289, 1775, 1249, ..., 1062, 538, 1473])\n", + "deriv_tensor (40, 81), indices tensor([2853, 2363, 1635, ..., 2603, 2330, 2729])\n", + "deriv_tensor (40, 81), indices tensor([2853, 2363, 1635, ..., 2603, 2330, 2729])\n", + "deriv_tensor (40, 81), indices tensor([2853, 2363, 1635, ..., 2603, 2330, 2729])\n", + "deriv_tensor (40, 81), indices tensor([2853, 2363, 1635, ..., 2603, 2330, 2729])\n", + "deriv_tensor (40, 81), indices tensor([ 106, 1463, 71, ..., 125, 2708, 2505])\n", + "deriv_tensor (40, 81), indices tensor([ 106, 1463, 71, ..., 125, 2708, 2505])\n", + "deriv_tensor (40, 81), indices tensor([ 106, 1463, 71, ..., 125, 2708, 2505])\n", + "deriv_tensor (40, 81), indices tensor([ 106, 1463, 71, ..., 125, 2708, 2505])\n", + "deriv_tensor (40, 81), indices tensor([ 482, 581, 812, ..., 1399, 2651, 245])\n", + "deriv_tensor (40, 81), indices tensor([ 482, 581, 812, ..., 1399, 2651, 245])\n", + "deriv_tensor (40, 81), indices tensor([ 482, 581, 812, ..., 1399, 2651, 245])\n", + "deriv_tensor (40, 81), indices tensor([ 482, 581, 812, ..., 1399, 2651, 245])\n", + "deriv_tensor (40, 81), indices tensor([2723, 1850, 2962, ..., 3057, 1088, 2822])\n", + "deriv_tensor (40, 81), indices tensor([2723, 1850, 2962, ..., 3057, 1088, 2822])\n", + "deriv_tensor (40, 81), indices tensor([2723, 1850, 2962, ..., 3057, 1088, 2822])\n", + "deriv_tensor (40, 81), indices tensor([2723, 1850, 2962, ..., 3057, 1088, 2822])\n", + "deriv_tensor (40, 81), indices tensor([2812, 272, 2934, ..., 1875, 1560, 2406])\n", + "deriv_tensor (40, 81), indices tensor([2812, 272, 2934, ..., 1875, 1560, 2406])\n", + "deriv_tensor (40, 81), indices tensor([2812, 272, 2934, ..., 1875, 1560, 2406])\n", + "deriv_tensor (40, 81), indices tensor([2812, 272, 2934, ..., 1875, 1560, 2406])\n", + "deriv_tensor (40, 81), indices tensor([1533, 2690, 2952, ..., 2762, 1596, 2593])\n", + "deriv_tensor (40, 81), indices tensor([1533, 2690, 2952, ..., 2762, 1596, 2593])\n", + "deriv_tensor (40, 81), indices tensor([1533, 2690, 2952, ..., 2762, 1596, 2593])\n", + "deriv_tensor (40, 81), indices tensor([1533, 2690, 2952, ..., 2762, 1596, 2593])\n", + "deriv_tensor (40, 81), indices tensor([2525, 1746, 584, ..., 321, 408, 2042])\n", + "deriv_tensor (40, 81), indices tensor([2525, 1746, 584, ..., 321, 408, 2042])\n", + "deriv_tensor (40, 81), indices tensor([2525, 1746, 584, ..., 321, 408, 2042])\n", + "deriv_tensor (40, 81), indices tensor([2525, 1746, 584, ..., 321, 408, 2042])\n", + "deriv_tensor (40, 81), indices tensor([2523, 1189, 551, ..., 1879, 1571, 1969])\n", + "deriv_tensor (40, 81), indices tensor([2523, 1189, 551, ..., 1879, 1571, 1969])\n", + "deriv_tensor (40, 81), indices tensor([2523, 1189, 551, ..., 1879, 1571, 1969])\n", + "deriv_tensor (40, 81), indices tensor([2523, 1189, 551, ..., 1879, 1571, 1969])\n", + 
"deriv_tensor (40, 81), indices tensor([1172, 2417, 3155, ..., 2292, 2566, 1281])\n", + "deriv_tensor (40, 81), indices tensor([1172, 2417, 3155, ..., 2292, 2566, 1281])\n", + "deriv_tensor (40, 81), indices tensor([1172, 2417, 3155, ..., 2292, 2566, 1281])\n", + "deriv_tensor (40, 81), indices tensor([1172, 2417, 3155, ..., 2292, 2566, 1281])\n", + "deriv_tensor (40, 81), indices tensor([2360, 3168, 647, ..., 849, 1566, 683])\n", + "deriv_tensor (40, 81), indices tensor([2360, 3168, 647, ..., 849, 1566, 683])\n", + "deriv_tensor (40, 81), indices tensor([2360, 3168, 647, ..., 849, 1566, 683])\n", + "deriv_tensor (40, 81), indices tensor([2360, 3168, 647, ..., 849, 1566, 683])\n", + "deriv_tensor (40, 81), indices tensor([1101, 1135, 220, ..., 272, 3152, 679])\n", + "deriv_tensor (40, 81), indices tensor([1101, 1135, 220, ..., 272, 3152, 679])\n", + "deriv_tensor (40, 81), indices tensor([1101, 1135, 220, ..., 272, 3152, 679])\n", + "deriv_tensor (40, 81), indices tensor([1101, 1135, 220, ..., 272, 3152, 679])\n", + "deriv_tensor (40, 81), indices tensor([3047, 397, 462, ..., 818, 770, 3175])\n", + "deriv_tensor (40, 81), indices tensor([3047, 397, 462, ..., 818, 770, 3175])\n", + "deriv_tensor (40, 81), indices tensor([3047, 397, 462, ..., 818, 770, 3175])\n", + "deriv_tensor (40, 81), indices tensor([3047, 397, 462, ..., 818, 770, 3175])\n", + "deriv_tensor (40, 81), indices tensor([1201, 3040, 1046, ..., 2685, 1453, 2873])\n", + "deriv_tensor (40, 81), indices tensor([1201, 3040, 1046, ..., 2685, 1453, 2873])\n", + "deriv_tensor (40, 81), indices tensor([1201, 3040, 1046, ..., 2685, 1453, 2873])\n", + "deriv_tensor (40, 81), indices tensor([1201, 3040, 1046, ..., 2685, 1453, 2873])\n", + "deriv_tensor (40, 81), indices tensor([ 769, 1639, 347, ..., 2501, 2353, 1113])\n", + "deriv_tensor (40, 81), indices tensor([ 769, 1639, 347, ..., 2501, 2353, 1113])\n", + "deriv_tensor (40, 81), indices tensor([ 769, 1639, 347, ..., 2501, 2353, 1113])\n", + "deriv_tensor (40, 81), indices tensor([ 769, 1639, 347, ..., 2501, 2353, 1113])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 1652, 465, ..., 2025, 673, 2831])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 1652, 465, ..., 2025, 673, 2831])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 1652, 465, ..., 2025, 673, 2831])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 1652, 465, ..., 2025, 673, 2831])\n", + "deriv_tensor (40, 81), indices tensor([1074, 1035, 2927, ..., 845, 982, 2908])\n", + "deriv_tensor (40, 81), indices tensor([1074, 1035, 2927, ..., 845, 982, 2908])\n", + "deriv_tensor (40, 81), indices tensor([1074, 1035, 2927, ..., 845, 982, 2908])\n", + "deriv_tensor (40, 81), indices tensor([1074, 1035, 2927, ..., 845, 982, 2908])\n", + "deriv_tensor (40, 81), indices tensor([2732, 1159, 1843, ..., 766, 513, 1471])\n", + "deriv_tensor (40, 81), indices tensor([2732, 1159, 1843, ..., 766, 513, 1471])\n", + "deriv_tensor (40, 81), indices tensor([2732, 1159, 1843, ..., 766, 513, 1471])\n", + "deriv_tensor (40, 81), indices tensor([2732, 1159, 1843, ..., 766, 513, 1471])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 763, 2724, ..., 2060, 498, 968])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 763, 2724, ..., 2060, 498, 968])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 763, 2724, ..., 2060, 498, 968])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 763, 2724, ..., 2060, 498, 968])\n", + "deriv_tensor (40, 81), indices tensor([ 764, 3000, 2625, ..., 798, 89, 1545])\n", + "deriv_tensor (40, 81), 
indices tensor([ 764, 3000, 2625, ..., 798, 89, 1545])\n", + "deriv_tensor (40, 81), indices tensor([ 764, 3000, 2625, ..., 798, 89, 1545])\n", + "deriv_tensor (40, 81), indices tensor([ 764, 3000, 2625, ..., 798, 89, 1545])\n", + "deriv_tensor (40, 81), indices tensor([3126, 792, 140, ..., 2768, 2042, 3217])\n", + "deriv_tensor (40, 81), indices tensor([3126, 792, 140, ..., 2768, 2042, 3217])\n", + "deriv_tensor (40, 81), indices tensor([3126, 792, 140, ..., 2768, 2042, 3217])\n", + "deriv_tensor (40, 81), indices tensor([3126, 792, 140, ..., 2768, 2042, 3217])\n", + "deriv_tensor (40, 81), indices tensor([1996, 706, 926, ..., 2186, 1346, 681])\n", + "deriv_tensor (40, 81), indices tensor([1996, 706, 926, ..., 2186, 1346, 681])\n", + "deriv_tensor (40, 81), indices tensor([1996, 706, 926, ..., 2186, 1346, 681])\n", + "deriv_tensor (40, 81), indices tensor([1996, 706, 926, ..., 2186, 1346, 681])\n", + "deriv_tensor (40, 81), indices tensor([2663, 2475, 413, ..., 2917, 2989, 1246])\n", + "deriv_tensor (40, 81), indices tensor([2663, 2475, 413, ..., 2917, 2989, 1246])\n", + "deriv_tensor (40, 81), indices tensor([2663, 2475, 413, ..., 2917, 2989, 1246])\n", + "deriv_tensor (40, 81), indices tensor([2663, 2475, 413, ..., 2917, 2989, 1246])\n", + "deriv_tensor (40, 81), indices tensor([ 327, 3194, 1917, ..., 2170, 1184, 1114])\n", + "deriv_tensor (40, 81), indices tensor([ 327, 3194, 1917, ..., 2170, 1184, 1114])\n", + "deriv_tensor (40, 81), indices tensor([ 327, 3194, 1917, ..., 2170, 1184, 1114])\n", + "deriv_tensor (40, 81), indices tensor([ 327, 3194, 1917, ..., 2170, 1184, 1114])\n", + "deriv_tensor (40, 81), indices tensor([ 277, 1441, 2043, ..., 1132, 2731, 1401])\n", + "deriv_tensor (40, 81), indices tensor([ 277, 1441, 2043, ..., 1132, 2731, 1401])\n", + "deriv_tensor (40, 81), indices tensor([ 277, 1441, 2043, ..., 1132, 2731, 1401])\n", + "deriv_tensor (40, 81), indices tensor([ 277, 1441, 2043, ..., 1132, 2731, 1401])\n", + "deriv_tensor (40, 81), indices tensor([ 117, 2700, 581, ..., 511, 1307, 2047])\n", + "deriv_tensor (40, 81), indices tensor([ 117, 2700, 581, ..., 511, 1307, 2047])\n", + "deriv_tensor (40, 81), indices tensor([ 117, 2700, 581, ..., 511, 1307, 2047])\n", + "deriv_tensor (40, 81), indices tensor([ 117, 2700, 581, ..., 511, 1307, 2047])\n", + "deriv_tensor (40, 81), indices tensor([ 645, 2120, 1318, ..., 376, 2309, 1723])\n", + "deriv_tensor (40, 81), indices tensor([ 645, 2120, 1318, ..., 376, 2309, 1723])\n", + "deriv_tensor (40, 81), indices tensor([ 645, 2120, 1318, ..., 376, 2309, 1723])\n", + "deriv_tensor (40, 81), indices tensor([ 645, 2120, 1318, ..., 376, 2309, 1723])\n", + "deriv_tensor (40, 81), indices tensor([2379, 282, 2173, ..., 756, 2551, 2765])\n", + "deriv_tensor (40, 81), indices tensor([2379, 282, 2173, ..., 756, 2551, 2765])\n", + "deriv_tensor (40, 81), indices tensor([2379, 282, 2173, ..., 756, 2551, 2765])\n", + "deriv_tensor (40, 81), indices tensor([2379, 282, 2173, ..., 756, 2551, 2765])\n", + "deriv_tensor (40, 81), indices tensor([2405, 784, 2574, ..., 1662, 1433, 809])\n", + "deriv_tensor (40, 81), indices tensor([2405, 784, 2574, ..., 1662, 1433, 809])\n", + "deriv_tensor (40, 81), indices tensor([2405, 784, 2574, ..., 1662, 1433, 809])\n", + "deriv_tensor (40, 81), indices tensor([2405, 784, 2574, ..., 1662, 1433, 809])\n", + "deriv_tensor (40, 81), indices tensor([2997, 871, 759, ..., 227, 24, 455])\n", + "deriv_tensor (40, 81), indices tensor([2997, 871, 759, ..., 227, 24, 455])\n", + "deriv_tensor (40, 81), indices 
tensor([2997, 871, 759, ..., 227, 24, 455])\n", + "deriv_tensor (40, 81), indices tensor([2997, 871, 759, ..., 227, 24, 455])\n", + "deriv_tensor (40, 81), indices tensor([ 254, 2311, 286, ..., 1449, 22, 1064])\n", + "deriv_tensor (40, 81), indices tensor([ 254, 2311, 286, ..., 1449, 22, 1064])\n", + "deriv_tensor (40, 81), indices tensor([ 254, 2311, 286, ..., 1449, 22, 1064])\n", + "deriv_tensor (40, 81), indices tensor([ 254, 2311, 286, ..., 1449, 22, 1064])\n", + "deriv_tensor (40, 81), indices tensor([1944, 1753, 3209, ..., 1386, 874, 2116])\n", + "deriv_tensor (40, 81), indices tensor([1944, 1753, 3209, ..., 1386, 874, 2116])\n", + "deriv_tensor (40, 81), indices tensor([1944, 1753, 3209, ..., 1386, 874, 2116])\n", + "deriv_tensor (40, 81), indices tensor([1944, 1753, 3209, ..., 1386, 874, 2116])\n", + "deriv_tensor (40, 81), indices tensor([2364, 2092, 3230, ..., 1600, 1671, 677])\n", + "deriv_tensor (40, 81), indices tensor([2364, 2092, 3230, ..., 1600, 1671, 677])\n", + "deriv_tensor (40, 81), indices tensor([2364, 2092, 3230, ..., 1600, 1671, 677])\n", + "deriv_tensor (40, 81), indices tensor([2364, 2092, 3230, ..., 1600, 1671, 677])\n", + "deriv_tensor (40, 81), indices tensor([ 444, 1545, 777, ..., 2908, 2250, 2048])\n", + "deriv_tensor (40, 81), indices tensor([ 444, 1545, 777, ..., 2908, 2250, 2048])\n", + "deriv_tensor (40, 81), indices tensor([ 444, 1545, 777, ..., 2908, 2250, 2048])\n", + "deriv_tensor (40, 81), indices tensor([ 444, 1545, 777, ..., 2908, 2250, 2048])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 2565, 1106, ..., 2603, 1785, 3192])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 2565, 1106, ..., 2603, 1785, 3192])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 2565, 1106, ..., 2603, 1785, 3192])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 2565, 1106, ..., 2603, 1785, 3192])\n", + "deriv_tensor (40, 81), indices tensor([ 687, 1332, 1143, ..., 3123, 118, 1609])\n", + "deriv_tensor (40, 81), indices tensor([ 687, 1332, 1143, ..., 3123, 118, 1609])\n", + "deriv_tensor (40, 81), indices tensor([ 687, 1332, 1143, ..., 3123, 118, 1609])\n", + "deriv_tensor (40, 81), indices tensor([ 687, 1332, 1143, ..., 3123, 118, 1609])\n", + "deriv_tensor (40, 81), indices tensor([1427, 134, 1445, ..., 1003, 145, 165])\n", + "deriv_tensor (40, 81), indices tensor([1427, 134, 1445, ..., 1003, 145, 165])\n", + "deriv_tensor (40, 81), indices tensor([1427, 134, 1445, ..., 1003, 145, 165])\n", + "deriv_tensor (40, 81), indices tensor([1427, 134, 1445, ..., 1003, 145, 165])\n", + "deriv_tensor (40, 81), indices tensor([1062, 443, 1513, ..., 358, 740, 2035])\n", + "deriv_tensor (40, 81), indices tensor([1062, 443, 1513, ..., 358, 740, 2035])\n", + "deriv_tensor (40, 81), indices tensor([1062, 443, 1513, ..., 358, 740, 2035])\n", + "deriv_tensor (40, 81), indices tensor([1062, 443, 1513, ..., 358, 740, 2035])\n", + "deriv_tensor (40, 81), indices tensor([ 613, 1023, 459, ..., 1971, 3134, 953])\n", + "deriv_tensor (40, 81), indices tensor([ 613, 1023, 459, ..., 1971, 3134, 953])\n", + "deriv_tensor (40, 81), indices tensor([ 613, 1023, 459, ..., 1971, 3134, 953])\n", + "deriv_tensor (40, 81), indices tensor([ 613, 1023, 459, ..., 1971, 3134, 953])\n", + "deriv_tensor (40, 81), indices tensor([1822, 1570, 209, ..., 753, 2548, 2457])\n", + "deriv_tensor (40, 81), indices tensor([1822, 1570, 209, ..., 753, 2548, 2457])\n", + "deriv_tensor (40, 81), indices tensor([1822, 1570, 209, ..., 753, 2548, 2457])\n", + "deriv_tensor (40, 81), indices tensor([1822, 1570, 209, 
..., 753, 2548, 2457])\n", + "deriv_tensor (40, 81), indices tensor([1140, 180, 2788, ..., 1008, 1546, 3112])\n", + "deriv_tensor (40, 81), indices tensor([1140, 180, 2788, ..., 1008, 1546, 3112])\n", + "deriv_tensor (40, 81), indices tensor([1140, 180, 2788, ..., 1008, 1546, 3112])\n", + "deriv_tensor (40, 81), indices tensor([1140, 180, 2788, ..., 1008, 1546, 3112])\n", + "deriv_tensor (40, 81), indices tensor([2540, 1595, 80, ..., 2778, 1127, 802])\n", + "deriv_tensor (40, 81), indices tensor([2540, 1595, 80, ..., 2778, 1127, 802])\n", + "deriv_tensor (40, 81), indices tensor([2540, 1595, 80, ..., 2778, 1127, 802])\n", + "deriv_tensor (40, 81), indices tensor([2540, 1595, 80, ..., 2778, 1127, 802])\n", + "deriv_tensor (40, 81), indices tensor([ 995, 2419, 2162, ..., 202, 707, 1245])\n", + "deriv_tensor (40, 81), indices tensor([ 995, 2419, 2162, ..., 202, 707, 1245])\n", + "deriv_tensor (40, 81), indices tensor([ 995, 2419, 2162, ..., 202, 707, 1245])\n", + "deriv_tensor (40, 81), indices tensor([ 995, 2419, 2162, ..., 202, 707, 1245])\n", + "deriv_tensor (40, 81), indices tensor([ 88, 1271, 254, ..., 844, 2884, 1203])\n", + "deriv_tensor (40, 81), indices tensor([ 88, 1271, 254, ..., 844, 2884, 1203])\n", + "deriv_tensor (40, 81), indices tensor([ 88, 1271, 254, ..., 844, 2884, 1203])\n", + "deriv_tensor (40, 81), indices tensor([ 88, 1271, 254, ..., 844, 2884, 1203])\n", + "deriv_tensor (40, 81), indices tensor([1275, 2088, 2142, ..., 613, 1943, 1018])\n", + "deriv_tensor (40, 81), indices tensor([1275, 2088, 2142, ..., 613, 1943, 1018])\n", + "deriv_tensor (40, 81), indices tensor([1275, 2088, 2142, ..., 613, 1943, 1018])\n", + "deriv_tensor (40, 81), indices tensor([1275, 2088, 2142, ..., 613, 1943, 1018])\n", + "deriv_tensor (40, 81), indices tensor([2687, 2217, 1562, ..., 2888, 1742, 3179])\n", + "deriv_tensor (40, 81), indices tensor([2687, 2217, 1562, ..., 2888, 1742, 3179])\n", + "deriv_tensor (40, 81), indices tensor([2687, 2217, 1562, ..., 2888, 1742, 3179])\n", + "deriv_tensor (40, 81), indices tensor([2687, 2217, 1562, ..., 2888, 1742, 3179])\n", + "deriv_tensor (40, 81), indices tensor([ 856, 2717, 803, ..., 2778, 840, 2349])\n", + "deriv_tensor (40, 81), indices tensor([ 856, 2717, 803, ..., 2778, 840, 2349])\n", + "deriv_tensor (40, 81), indices tensor([ 856, 2717, 803, ..., 2778, 840, 2349])\n", + "deriv_tensor (40, 81), indices tensor([ 856, 2717, 803, ..., 2778, 840, 2349])\n", + "deriv_tensor (40, 81), indices tensor([ 90, 1685, 2652, ..., 1358, 1636, 2032])\n", + "deriv_tensor (40, 81), indices tensor([ 90, 1685, 2652, ..., 1358, 1636, 2032])\n", + "deriv_tensor (40, 81), indices tensor([ 90, 1685, 2652, ..., 1358, 1636, 2032])\n", + "deriv_tensor (40, 81), indices tensor([ 90, 1685, 2652, ..., 1358, 1636, 2032])\n", + "deriv_tensor (40, 81), indices tensor([ 722, 149, 1604, ..., 1178, 1068, 657])\n", + "deriv_tensor (40, 81), indices tensor([ 722, 149, 1604, ..., 1178, 1068, 657])\n", + "deriv_tensor (40, 81), indices tensor([ 722, 149, 1604, ..., 1178, 1068, 657])\n", + "deriv_tensor (40, 81), indices tensor([ 722, 149, 1604, ..., 1178, 1068, 657])\n", + "deriv_tensor (40, 81), indices tensor([ 506, 2902, 2530, ..., 24, 2818, 1195])\n", + "deriv_tensor (40, 81), indices tensor([ 506, 2902, 2530, ..., 24, 2818, 1195])\n", + "deriv_tensor (40, 81), indices tensor([ 506, 2902, 2530, ..., 24, 2818, 1195])\n", + "deriv_tensor (40, 81), indices tensor([ 506, 2902, 2530, ..., 24, 2818, 1195])\n", + "deriv_tensor (40, 81), indices tensor([ 417, 1717, 2025, ..., 1615, 
2482, 2397])\n", + "deriv_tensor (40, 81), indices tensor([ 417, 1717, 2025, ..., 1615, 2482, 2397])\n", + "deriv_tensor (40, 81), indices tensor([ 417, 1717, 2025, ..., 1615, 2482, 2397])\n", + "deriv_tensor (40, 81), indices tensor([ 417, 1717, 2025, ..., 1615, 2482, 2397])\n", + "deriv_tensor (40, 81), indices tensor([2818, 321, 2258, ..., 532, 890, 35])\n", + "deriv_tensor (40, 81), indices tensor([2818, 321, 2258, ..., 532, 890, 35])\n", + "deriv_tensor (40, 81), indices tensor([2818, 321, 2258, ..., 532, 890, 35])\n", + "deriv_tensor (40, 81), indices tensor([2818, 321, 2258, ..., 532, 890, 35])\n", + "deriv_tensor (40, 81), indices tensor([2640, 1463, 3175, ..., 1191, 1749, 2933])\n", + "deriv_tensor (40, 81), indices tensor([2640, 1463, 3175, ..., 1191, 1749, 2933])\n", + "deriv_tensor (40, 81), indices tensor([2640, 1463, 3175, ..., 1191, 1749, 2933])\n", + "deriv_tensor (40, 81), indices tensor([2640, 1463, 3175, ..., 1191, 1749, 2933])\n", + "deriv_tensor (40, 81), indices tensor([2669, 590, 835, ..., 1995, 2650, 841])\n", + "deriv_tensor (40, 81), indices tensor([2669, 590, 835, ..., 1995, 2650, 841])\n", + "deriv_tensor (40, 81), indices tensor([2669, 590, 835, ..., 1995, 2650, 841])\n", + "deriv_tensor (40, 81), indices tensor([2669, 590, 835, ..., 1995, 2650, 841])\n", + "deriv_tensor (40, 81), indices tensor([2222, 834, 184, ..., 3079, 2309, 1555])\n", + "deriv_tensor (40, 81), indices tensor([2222, 834, 184, ..., 3079, 2309, 1555])\n", + "deriv_tensor (40, 81), indices tensor([2222, 834, 184, ..., 3079, 2309, 1555])\n", + "deriv_tensor (40, 81), indices tensor([2222, 834, 184, ..., 3079, 2309, 1555])\n", + "deriv_tensor (40, 81), indices tensor([1916, 2784, 1566, ..., 2968, 426, 2484])\n", + "deriv_tensor (40, 81), indices tensor([1916, 2784, 1566, ..., 2968, 426, 2484])\n", + "deriv_tensor (40, 81), indices tensor([1916, 2784, 1566, ..., 2968, 426, 2484])\n", + "deriv_tensor (40, 81), indices tensor([1916, 2784, 1566, ..., 2968, 426, 2484])\n", + "deriv_tensor (40, 81), indices tensor([1164, 1853, 2882, ..., 2017, 1432, 2048])\n", + "deriv_tensor (40, 81), indices tensor([1164, 1853, 2882, ..., 2017, 1432, 2048])\n", + "deriv_tensor (40, 81), indices tensor([1164, 1853, 2882, ..., 2017, 1432, 2048])\n", + "deriv_tensor (40, 81), indices tensor([1164, 1853, 2882, ..., 2017, 1432, 2048])\n", + "deriv_tensor (40, 81), indices tensor([1424, 1406, 3099, ..., 1111, 2005, 2109])\n", + "deriv_tensor (40, 81), indices tensor([1424, 1406, 3099, ..., 1111, 2005, 2109])\n", + "deriv_tensor (40, 81), indices tensor([1424, 1406, 3099, ..., 1111, 2005, 2109])\n", + "deriv_tensor (40, 81), indices tensor([1424, 1406, 3099, ..., 1111, 2005, 2109])\n", + "deriv_tensor (40, 81), indices tensor([ 955, 855, 460, ..., 1205, 61, 1143])\n", + "deriv_tensor (40, 81), indices tensor([ 955, 855, 460, ..., 1205, 61, 1143])\n", + "deriv_tensor (40, 81), indices tensor([ 955, 855, 460, ..., 1205, 61, 1143])\n", + "deriv_tensor (40, 81), indices tensor([ 955, 855, 460, ..., 1205, 61, 1143])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2519, 1886, ..., 2948, 1365, 2516])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2519, 1886, ..., 2948, 1365, 2516])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2519, 1886, ..., 2948, 1365, 2516])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2519, 1886, ..., 2948, 1365, 2516])\n", + "deriv_tensor (40, 81), indices tensor([ 817, 2505, 908, ..., 281, 2384, 1565])\n", + "deriv_tensor (40, 81), indices tensor([ 817, 2505, 908, ..., 281, 2384, 
1565])\n", + "deriv_tensor (40, 81), indices tensor([ 817, 2505, 908, ..., 281, 2384, 1565])\n", + "deriv_tensor (40, 81), indices tensor([ 817, 2505, 908, ..., 281, 2384, 1565])\n", + "deriv_tensor (40, 81), indices tensor([1372, 138, 3215, ..., 1258, 420, 2840])\n", + "deriv_tensor (40, 81), indices tensor([1372, 138, 3215, ..., 1258, 420, 2840])\n", + "deriv_tensor (40, 81), indices tensor([1372, 138, 3215, ..., 1258, 420, 2840])\n", + "deriv_tensor (40, 81), indices tensor([1372, 138, 3215, ..., 1258, 420, 2840])\n", + "deriv_tensor (40, 81), indices tensor([ 52, 826, 1765, ..., 2658, 2973, 425])\n", + "deriv_tensor (40, 81), indices tensor([ 52, 826, 1765, ..., 2658, 2973, 425])\n", + "deriv_tensor (40, 81), indices tensor([ 52, 826, 1765, ..., 2658, 2973, 425])\n", + "deriv_tensor (40, 81), indices tensor([ 52, 826, 1765, ..., 2658, 2973, 425])\n", + "deriv_tensor (40, 81), indices tensor([1951, 1212, 2580, ..., 508, 1030, 2913])\n", + "deriv_tensor (40, 81), indices tensor([1951, 1212, 2580, ..., 508, 1030, 2913])\n", + "deriv_tensor (40, 81), indices tensor([1951, 1212, 2580, ..., 508, 1030, 2913])\n", + "deriv_tensor (40, 81), indices tensor([1951, 1212, 2580, ..., 508, 1030, 2913])\n", + "deriv_tensor (40, 81), indices tensor([3066, 2780, 3173, ..., 667, 2225, 1151])\n", + "deriv_tensor (40, 81), indices tensor([3066, 2780, 3173, ..., 667, 2225, 1151])\n", + "deriv_tensor (40, 81), indices tensor([3066, 2780, 3173, ..., 667, 2225, 1151])\n", + "deriv_tensor (40, 81), indices tensor([3066, 2780, 3173, ..., 667, 2225, 1151])\n", + "deriv_tensor (40, 81), indices tensor([ 90, 3110, 2694, ..., 1054, 2727, 2387])\n", + "deriv_tensor (40, 81), indices tensor([ 90, 3110, 2694, ..., 1054, 2727, 2387])\n", + "deriv_tensor (40, 81), indices tensor([ 90, 3110, 2694, ..., 1054, 2727, 2387])\n", + "deriv_tensor (40, 81), indices tensor([ 90, 3110, 2694, ..., 1054, 2727, 2387])\n", + "deriv_tensor (40, 81), indices tensor([1175, 1038, 2706, ..., 1168, 1367, 547])\n", + "deriv_tensor (40, 81), indices tensor([1175, 1038, 2706, ..., 1168, 1367, 547])\n", + "deriv_tensor (40, 81), indices tensor([1175, 1038, 2706, ..., 1168, 1367, 547])\n", + "deriv_tensor (40, 81), indices tensor([1175, 1038, 2706, ..., 1168, 1367, 547])\n", + "deriv_tensor (40, 81), indices tensor([ 299, 2517, 1356, ..., 1138, 1776, 304])\n", + "deriv_tensor (40, 81), indices tensor([ 299, 2517, 1356, ..., 1138, 1776, 304])\n", + "deriv_tensor (40, 81), indices tensor([ 299, 2517, 1356, ..., 1138, 1776, 304])\n", + "deriv_tensor (40, 81), indices tensor([ 299, 2517, 1356, ..., 1138, 1776, 304])\n", + "deriv_tensor (40, 81), indices tensor([ 517, 2807, 1918, ..., 976, 1432, 328])\n", + "deriv_tensor (40, 81), indices tensor([ 517, 2807, 1918, ..., 976, 1432, 328])\n", + "deriv_tensor (40, 81), indices tensor([ 517, 2807, 1918, ..., 976, 1432, 328])\n", + "deriv_tensor (40, 81), indices tensor([ 517, 2807, 1918, ..., 976, 1432, 328])\n", + "deriv_tensor (40, 81), indices tensor([2342, 49, 2332, ..., 254, 206, 2857])\n", + "deriv_tensor (40, 81), indices tensor([2342, 49, 2332, ..., 254, 206, 2857])\n", + "deriv_tensor (40, 81), indices tensor([2342, 49, 2332, ..., 254, 206, 2857])\n", + "deriv_tensor (40, 81), indices tensor([2342, 49, 2332, ..., 254, 206, 2857])\n", + "deriv_tensor (40, 81), indices tensor([2031, 2636, 1424, ..., 402, 1352, 2694])\n", + "deriv_tensor (40, 81), indices tensor([2031, 2636, 1424, ..., 402, 1352, 2694])\n", + "deriv_tensor (40, 81), indices tensor([2031, 2636, 1424, ..., 402, 1352, 2694])\n", + 
"deriv_tensor (40, 81), indices tensor([2031, 2636, 1424, ..., 402, 1352, 2694])\n", + "deriv_tensor (40, 81), indices tensor([ 248, 2700, 1977, ..., 540, 1794, 316])\n", + "deriv_tensor (40, 81), indices tensor([ 248, 2700, 1977, ..., 540, 1794, 316])\n", + "deriv_tensor (40, 81), indices tensor([ 248, 2700, 1977, ..., 540, 1794, 316])\n", + "deriv_tensor (40, 81), indices tensor([ 248, 2700, 1977, ..., 540, 1794, 316])\n", + "deriv_tensor (40, 81), indices tensor([1407, 2905, 1711, ..., 675, 1578, 479])\n", + "deriv_tensor (40, 81), indices tensor([1407, 2905, 1711, ..., 675, 1578, 479])\n", + "deriv_tensor (40, 81), indices tensor([1407, 2905, 1711, ..., 675, 1578, 479])\n", + "deriv_tensor (40, 81), indices tensor([1407, 2905, 1711, ..., 675, 1578, 479])\n", + "deriv_tensor (40, 81), indices tensor([2425, 2845, 2909, ..., 246, 118, 1798])\n", + "deriv_tensor (40, 81), indices tensor([2425, 2845, 2909, ..., 246, 118, 1798])\n", + "deriv_tensor (40, 81), indices tensor([2425, 2845, 2909, ..., 246, 118, 1798])\n", + "deriv_tensor (40, 81), indices tensor([2425, 2845, 2909, ..., 246, 118, 1798])\n", + "deriv_tensor (40, 81), indices tensor([1832, 488, 763, ..., 235, 1573, 392])\n", + "deriv_tensor (40, 81), indices tensor([1832, 488, 763, ..., 235, 1573, 392])\n", + "deriv_tensor (40, 81), indices tensor([1832, 488, 763, ..., 235, 1573, 392])\n", + "deriv_tensor (40, 81), indices tensor([1832, 488, 763, ..., 235, 1573, 392])\n", + "deriv_tensor (40, 81), indices tensor([1478, 3010, 2998, ..., 517, 2283, 3055])\n", + "deriv_tensor (40, 81), indices tensor([1478, 3010, 2998, ..., 517, 2283, 3055])\n", + "deriv_tensor (40, 81), indices tensor([1478, 3010, 2998, ..., 517, 2283, 3055])\n", + "deriv_tensor (40, 81), indices tensor([1478, 3010, 2998, ..., 517, 2283, 3055])\n", + "deriv_tensor (40, 81), indices tensor([1660, 2839, 2769, ..., 3115, 1441, 3077])\n", + "deriv_tensor (40, 81), indices tensor([1660, 2839, 2769, ..., 3115, 1441, 3077])\n", + "deriv_tensor (40, 81), indices tensor([1660, 2839, 2769, ..., 3115, 1441, 3077])\n", + "deriv_tensor (40, 81), indices tensor([1660, 2839, 2769, ..., 3115, 1441, 3077])\n", + "deriv_tensor (40, 81), indices tensor([ 934, 383, 904, ..., 2356, 2710, 1421])\n", + "deriv_tensor (40, 81), indices tensor([ 934, 383, 904, ..., 2356, 2710, 1421])\n", + "deriv_tensor (40, 81), indices tensor([ 934, 383, 904, ..., 2356, 2710, 1421])\n", + "deriv_tensor (40, 81), indices tensor([ 934, 383, 904, ..., 2356, 2710, 1421])\n", + "deriv_tensor (40, 81), indices tensor([1677, 2574, 1310, ..., 146, 346, 733])\n", + "deriv_tensor (40, 81), indices tensor([1677, 2574, 1310, ..., 146, 346, 733])\n", + "deriv_tensor (40, 81), indices tensor([1677, 2574, 1310, ..., 146, 346, 733])\n", + "deriv_tensor (40, 81), indices tensor([1677, 2574, 1310, ..., 146, 346, 733])\n", + "deriv_tensor (40, 81), indices tensor([2426, 588, 1499, ..., 525, 566, 474])\n", + "deriv_tensor (40, 81), indices tensor([2426, 588, 1499, ..., 525, 566, 474])\n", + "deriv_tensor (40, 81), indices tensor([2426, 588, 1499, ..., 525, 566, 474])\n", + "deriv_tensor (40, 81), indices tensor([2426, 588, 1499, ..., 525, 566, 474])\n", + "deriv_tensor (40, 81), indices tensor([1495, 1293, 8, ..., 1073, 2520, 3052])\n", + "deriv_tensor (40, 81), indices tensor([1495, 1293, 8, ..., 1073, 2520, 3052])\n", + "deriv_tensor (40, 81), indices tensor([1495, 1293, 8, ..., 1073, 2520, 3052])\n", + "deriv_tensor (40, 81), indices tensor([1495, 1293, 8, ..., 1073, 2520, 3052])\n", + "deriv_tensor (40, 81), indices 
tensor([ 964, 2385, 1635, ..., 3119, 666, 1271])\n", + "deriv_tensor (40, 81), indices tensor([ 964, 2385, 1635, ..., 3119, 666, 1271])\n", + "deriv_tensor (40, 81), indices tensor([ 964, 2385, 1635, ..., 3119, 666, 1271])\n", + "deriv_tensor (40, 81), indices tensor([ 964, 2385, 1635, ..., 3119, 666, 1271])\n", + "deriv_tensor (40, 81), indices tensor([ 775, 1172, 1089, ..., 2133, 398, 2746])\n", + "deriv_tensor (40, 81), indices tensor([ 775, 1172, 1089, ..., 2133, 398, 2746])\n", + "deriv_tensor (40, 81), indices tensor([ 775, 1172, 1089, ..., 2133, 398, 2746])\n", + "deriv_tensor (40, 81), indices tensor([ 775, 1172, 1089, ..., 2133, 398, 2746])\n", + "deriv_tensor (40, 81), indices tensor([ 135, 1920, 1903, ..., 1378, 2543, 237])\n", + "deriv_tensor (40, 81), indices tensor([ 135, 1920, 1903, ..., 1378, 2543, 237])\n", + "deriv_tensor (40, 81), indices tensor([ 135, 1920, 1903, ..., 1378, 2543, 237])\n", + "deriv_tensor (40, 81), indices tensor([ 135, 1920, 1903, ..., 1378, 2543, 237])\n", + "deriv_tensor (40, 81), indices tensor([1134, 2131, 1985, ..., 2949, 3012, 1362])\n", + "deriv_tensor (40, 81), indices tensor([1134, 2131, 1985, ..., 2949, 3012, 1362])\n", + "deriv_tensor (40, 81), indices tensor([1134, 2131, 1985, ..., 2949, 3012, 1362])\n", + "deriv_tensor (40, 81), indices tensor([1134, 2131, 1985, ..., 2949, 3012, 1362])\n", + "deriv_tensor (40, 81), indices tensor([ 618, 488, 465, ..., 3073, 687, 412])\n", + "deriv_tensor (40, 81), indices tensor([ 618, 488, 465, ..., 3073, 687, 412])\n", + "deriv_tensor (40, 81), indices tensor([ 618, 488, 465, ..., 3073, 687, 412])\n", + "deriv_tensor (40, 81), indices tensor([ 618, 488, 465, ..., 3073, 687, 412])\n", + "deriv_tensor (40, 81), indices tensor([2407, 2149, 288, ..., 1800, 1093, 1413])\n", + "deriv_tensor (40, 81), indices tensor([2407, 2149, 288, ..., 1800, 1093, 1413])\n", + "deriv_tensor (40, 81), indices tensor([2407, 2149, 288, ..., 1800, 1093, 1413])\n", + "deriv_tensor (40, 81), indices tensor([2407, 2149, 288, ..., 1800, 1093, 1413])\n", + "deriv_tensor (40, 81), indices tensor([1328, 2766, 1958, ..., 2566, 460, 2035])\n", + "deriv_tensor (40, 81), indices tensor([1328, 2766, 1958, ..., 2566, 460, 2035])\n", + "deriv_tensor (40, 81), indices tensor([1328, 2766, 1958, ..., 2566, 460, 2035])\n", + "deriv_tensor (40, 81), indices tensor([1328, 2766, 1958, ..., 2566, 460, 2035])\n", + "deriv_tensor (40, 81), indices tensor([2923, 2235, 1559, ..., 1045, 1498, 1459])\n", + "deriv_tensor (40, 81), indices tensor([2923, 2235, 1559, ..., 1045, 1498, 1459])\n", + "deriv_tensor (40, 81), indices tensor([2923, 2235, 1559, ..., 1045, 1498, 1459])\n", + "deriv_tensor (40, 81), indices tensor([2923, 2235, 1559, ..., 1045, 1498, 1459])\n", + "deriv_tensor (40, 81), indices tensor([ 205, 533, 3198, ..., 2064, 1758, 274])\n", + "deriv_tensor (40, 81), indices tensor([ 205, 533, 3198, ..., 2064, 1758, 274])\n", + "deriv_tensor (40, 81), indices tensor([ 205, 533, 3198, ..., 2064, 1758, 274])\n", + "deriv_tensor (40, 81), indices tensor([ 205, 533, 3198, ..., 2064, 1758, 274])\n", + "deriv_tensor (40, 81), indices tensor([1962, 3131, 2749, ..., 3012, 2512, 1956])\n", + "deriv_tensor (40, 81), indices tensor([1962, 3131, 2749, ..., 3012, 2512, 1956])\n", + "deriv_tensor (40, 81), indices tensor([1962, 3131, 2749, ..., 3012, 2512, 1956])\n", + "deriv_tensor (40, 81), indices tensor([1962, 3131, 2749, ..., 3012, 2512, 1956])\n", + "deriv_tensor (40, 81), indices tensor([ 519, 13, 730, ..., 498, 1333, 289])\n", + "deriv_tensor (40, 81), 
indices tensor([ 519, 13, 730, ..., 498, 1333, 289])\n", + "deriv_tensor (40, 81), indices tensor([ 519, 13, 730, ..., 498, 1333, 289])\n", + "deriv_tensor (40, 81), indices tensor([ 519, 13, 730, ..., 498, 1333, 289])\n", + "deriv_tensor (40, 81), indices tensor([2446, 1147, 65, ..., 1530, 3010, 2560])\n", + "deriv_tensor (40, 81), indices tensor([2446, 1147, 65, ..., 1530, 3010, 2560])\n", + "deriv_tensor (40, 81), indices tensor([2446, 1147, 65, ..., 1530, 3010, 2560])\n", + "deriv_tensor (40, 81), indices tensor([2446, 1147, 65, ..., 1530, 3010, 2560])\n", + "deriv_tensor (40, 81), indices tensor([2687, 1665, 1858, ..., 678, 454, 1441])\n", + "deriv_tensor (40, 81), indices tensor([2687, 1665, 1858, ..., 678, 454, 1441])\n", + "deriv_tensor (40, 81), indices tensor([2687, 1665, 1858, ..., 678, 454, 1441])\n", + "deriv_tensor (40, 81), indices tensor([2687, 1665, 1858, ..., 678, 454, 1441])\n", + "deriv_tensor (40, 81), indices tensor([2744, 1886, 2368, ..., 3043, 905, 493])\n", + "deriv_tensor (40, 81), indices tensor([2744, 1886, 2368, ..., 3043, 905, 493])\n", + "deriv_tensor (40, 81), indices tensor([2744, 1886, 2368, ..., 3043, 905, 493])\n", + "deriv_tensor (40, 81), indices tensor([2744, 1886, 2368, ..., 3043, 905, 493])\n", + "deriv_tensor (40, 81), indices tensor([2563, 237, 2265, ..., 1326, 1861, 1352])\n", + "deriv_tensor (40, 81), indices tensor([2563, 237, 2265, ..., 1326, 1861, 1352])\n", + "deriv_tensor (40, 81), indices tensor([2563, 237, 2265, ..., 1326, 1861, 1352])\n", + "deriv_tensor (40, 81), indices tensor([2563, 237, 2265, ..., 1326, 1861, 1352])\n", + "deriv_tensor (40, 81), indices tensor([2018, 5, 129, ..., 104, 2271, 1889])\n", + "deriv_tensor (40, 81), indices tensor([2018, 5, 129, ..., 104, 2271, 1889])\n", + "deriv_tensor (40, 81), indices tensor([2018, 5, 129, ..., 104, 2271, 1889])\n", + "deriv_tensor (40, 81), indices tensor([2018, 5, 129, ..., 104, 2271, 1889])\n", + "deriv_tensor (40, 81), indices tensor([2970, 1939, 1027, ..., 2027, 3206, 629])\n", + "deriv_tensor (40, 81), indices tensor([2970, 1939, 1027, ..., 2027, 3206, 629])\n", + "deriv_tensor (40, 81), indices tensor([2970, 1939, 1027, ..., 2027, 3206, 629])\n", + "deriv_tensor (40, 81), indices tensor([2970, 1939, 1027, ..., 2027, 3206, 629])\n", + "deriv_tensor (40, 81), indices tensor([2352, 2243, 432, ..., 1692, 1800, 1700])\n", + "deriv_tensor (40, 81), indices tensor([2352, 2243, 432, ..., 1692, 1800, 1700])\n", + "deriv_tensor (40, 81), indices tensor([2352, 2243, 432, ..., 1692, 1800, 1700])\n", + "deriv_tensor (40, 81), indices tensor([2352, 2243, 432, ..., 1692, 1800, 1700])\n", + "deriv_tensor (40, 81), indices tensor([1157, 834, 3032, ..., 2039, 2343, 1154])\n", + "deriv_tensor (40, 81), indices tensor([1157, 834, 3032, ..., 2039, 2343, 1154])\n", + "deriv_tensor (40, 81), indices tensor([1157, 834, 3032, ..., 2039, 2343, 1154])\n", + "deriv_tensor (40, 81), indices tensor([1157, 834, 3032, ..., 2039, 2343, 1154])\n", + "deriv_tensor (40, 81), indices tensor([1332, 2207, 1429, ..., 2190, 228, 1867])\n", + "deriv_tensor (40, 81), indices tensor([1332, 2207, 1429, ..., 2190, 228, 1867])\n", + "deriv_tensor (40, 81), indices tensor([1332, 2207, 1429, ..., 2190, 228, 1867])\n", + "deriv_tensor (40, 81), indices tensor([1332, 2207, 1429, ..., 2190, 228, 1867])\n", + "deriv_tensor (40, 81), indices tensor([1382, 1739, 227, ..., 1715, 1286, 527])\n", + "deriv_tensor (40, 81), indices tensor([1382, 1739, 227, ..., 1715, 1286, 527])\n", + "deriv_tensor (40, 81), indices tensor([1382, 1739, 
227, ..., 1715, 1286, 527])\n", + "deriv_tensor (40, 81), indices tensor([1382, 1739, 227, ..., 1715, 1286, 527])\n", + "deriv_tensor (40, 81), indices tensor([2863, 1191, 2128, ..., 1415, 2774, 2413])\n", + "deriv_tensor (40, 81), indices tensor([2863, 1191, 2128, ..., 1415, 2774, 2413])\n", + "deriv_tensor (40, 81), indices tensor([2863, 1191, 2128, ..., 1415, 2774, 2413])\n", + "deriv_tensor (40, 81), indices tensor([2863, 1191, 2128, ..., 1415, 2774, 2413])\n", + "deriv_tensor (40, 81), indices tensor([ 599, 1395, 1462, ..., 1909, 325, 2717])\n", + "deriv_tensor (40, 81), indices tensor([ 599, 1395, 1462, ..., 1909, 325, 2717])\n", + "deriv_tensor (40, 81), indices tensor([ 599, 1395, 1462, ..., 1909, 325, 2717])\n", + "deriv_tensor (40, 81), indices tensor([ 599, 1395, 1462, ..., 1909, 325, 2717])\n", + "deriv_tensor (40, 81), indices tensor([ 113, 1746, 868, ..., 2559, 2862, 1522])\n", + "deriv_tensor (40, 81), indices tensor([ 113, 1746, 868, ..., 2559, 2862, 1522])\n", + "deriv_tensor (40, 81), indices tensor([ 113, 1746, 868, ..., 2559, 2862, 1522])\n", + "deriv_tensor (40, 81), indices tensor([ 113, 1746, 868, ..., 2559, 2862, 1522])\n", + "deriv_tensor (40, 81), indices tensor([1886, 2295, 1716, ..., 386, 1309, 2487])\n", + "deriv_tensor (40, 81), indices tensor([1886, 2295, 1716, ..., 386, 1309, 2487])\n", + "deriv_tensor (40, 81), indices tensor([1886, 2295, 1716, ..., 386, 1309, 2487])\n", + "deriv_tensor (40, 81), indices tensor([1886, 2295, 1716, ..., 386, 1309, 2487])\n", + "deriv_tensor (40, 81), indices tensor([ 907, 1478, 2621, ..., 97, 1567, 844])\n", + "deriv_tensor (40, 81), indices tensor([ 907, 1478, 2621, ..., 97, 1567, 844])\n", + "deriv_tensor (40, 81), indices tensor([ 907, 1478, 2621, ..., 97, 1567, 844])\n", + "deriv_tensor (40, 81), indices tensor([ 907, 1478, 2621, ..., 97, 1567, 844])\n", + "deriv_tensor (40, 81), indices tensor([1127, 2486, 491, ..., 893, 511, 1627])\n", + "deriv_tensor (40, 81), indices tensor([1127, 2486, 491, ..., 893, 511, 1627])\n", + "deriv_tensor (40, 81), indices tensor([1127, 2486, 491, ..., 893, 511, 1627])\n", + "deriv_tensor (40, 81), indices tensor([1127, 2486, 491, ..., 893, 511, 1627])\n", + "deriv_tensor (40, 81), indices tensor([1357, 3205, 965, ..., 2584, 2577, 2440])\n", + "deriv_tensor (40, 81), indices tensor([1357, 3205, 965, ..., 2584, 2577, 2440])\n", + "deriv_tensor (40, 81), indices tensor([1357, 3205, 965, ..., 2584, 2577, 2440])\n", + "deriv_tensor (40, 81), indices tensor([1357, 3205, 965, ..., 2584, 2577, 2440])\n", + "deriv_tensor (40, 81), indices tensor([2500, 2452, 3019, ..., 632, 2326, 304])\n", + "deriv_tensor (40, 81), indices tensor([2500, 2452, 3019, ..., 632, 2326, 304])\n", + "deriv_tensor (40, 81), indices tensor([2500, 2452, 3019, ..., 632, 2326, 304])\n", + "deriv_tensor (40, 81), indices tensor([2500, 2452, 3019, ..., 632, 2326, 304])\n", + "deriv_tensor (40, 81), indices tensor([2157, 642, 3076, ..., 3204, 626, 1793])\n", + "deriv_tensor (40, 81), indices tensor([2157, 642, 3076, ..., 3204, 626, 1793])\n", + "deriv_tensor (40, 81), indices tensor([2157, 642, 3076, ..., 3204, 626, 1793])\n", + "deriv_tensor (40, 81), indices tensor([2157, 642, 3076, ..., 3204, 626, 1793])\n", + "deriv_tensor (40, 81), indices tensor([ 380, 1621, 858, ..., 2241, 1594, 1517])\n", + "deriv_tensor (40, 81), indices tensor([ 380, 1621, 858, ..., 2241, 1594, 1517])\n", + "deriv_tensor (40, 81), indices tensor([ 380, 1621, 858, ..., 2241, 1594, 1517])\n", + "deriv_tensor (40, 81), indices tensor([ 380, 1621, 858, ..., 
2241, 1594, 1517])\n", + "deriv_tensor (40, 81), indices tensor([1177, 2923, 2255, ..., 1706, 1617, 973])\n", + "deriv_tensor (40, 81), indices tensor([1177, 2923, 2255, ..., 1706, 1617, 973])\n", + "deriv_tensor (40, 81), indices tensor([1177, 2923, 2255, ..., 1706, 1617, 973])\n", + "deriv_tensor (40, 81), indices tensor([1177, 2923, 2255, ..., 1706, 1617, 973])\n", + "deriv_tensor (40, 81), indices tensor([2669, 3073, 697, ..., 1532, 816, 1961])\n", + "deriv_tensor (40, 81), indices tensor([2669, 3073, 697, ..., 1532, 816, 1961])\n", + "deriv_tensor (40, 81), indices tensor([2669, 3073, 697, ..., 1532, 816, 1961])\n", + "deriv_tensor (40, 81), indices tensor([2669, 3073, 697, ..., 1532, 816, 1961])\n", + "deriv_tensor (40, 81), indices tensor([2285, 225, 2859, ..., 1480, 1423, 1388])\n", + "deriv_tensor (40, 81), indices tensor([2285, 225, 2859, ..., 1480, 1423, 1388])\n", + "deriv_tensor (40, 81), indices tensor([2285, 225, 2859, ..., 1480, 1423, 1388])\n", + "deriv_tensor (40, 81), indices tensor([2285, 225, 2859, ..., 1480, 1423, 1388])\n", + "deriv_tensor (40, 81), indices tensor([2930, 948, 2933, ..., 858, 1505, 1026])\n", + "deriv_tensor (40, 81), indices tensor([2930, 948, 2933, ..., 858, 1505, 1026])\n", + "deriv_tensor (40, 81), indices tensor([2930, 948, 2933, ..., 858, 1505, 1026])\n", + "deriv_tensor (40, 81), indices tensor([2930, 948, 2933, ..., 858, 1505, 1026])\n", + "deriv_tensor (40, 81), indices tensor([2211, 2768, 1917, ..., 2518, 1250, 2052])\n", + "deriv_tensor (40, 81), indices tensor([2211, 2768, 1917, ..., 2518, 1250, 2052])\n", + "deriv_tensor (40, 81), indices tensor([2211, 2768, 1917, ..., 2518, 1250, 2052])\n", + "deriv_tensor (40, 81), indices tensor([2211, 2768, 1917, ..., 2518, 1250, 2052])\n", + "deriv_tensor (40, 81), indices tensor([2258, 2989, 1339, ..., 1304, 2415, 2886])\n", + "deriv_tensor (40, 81), indices tensor([2258, 2989, 1339, ..., 1304, 2415, 2886])\n", + "deriv_tensor (40, 81), indices tensor([2258, 2989, 1339, ..., 1304, 2415, 2886])\n", + "deriv_tensor (40, 81), indices tensor([2258, 2989, 1339, ..., 1304, 2415, 2886])\n", + "deriv_tensor (40, 81), indices tensor([1955, 400, 94, ..., 17, 2893, 586])\n", + "deriv_tensor (40, 81), indices tensor([1955, 400, 94, ..., 17, 2893, 586])\n", + "deriv_tensor (40, 81), indices tensor([1955, 400, 94, ..., 17, 2893, 586])\n", + "deriv_tensor (40, 81), indices tensor([1955, 400, 94, ..., 17, 2893, 586])\n", + "deriv_tensor (40, 81), indices tensor([2825, 2529, 2578, ..., 1441, 2853, 864])\n", + "deriv_tensor (40, 81), indices tensor([2825, 2529, 2578, ..., 1441, 2853, 864])\n", + "deriv_tensor (40, 81), indices tensor([2825, 2529, 2578, ..., 1441, 2853, 864])\n", + "deriv_tensor (40, 81), indices tensor([2825, 2529, 2578, ..., 1441, 2853, 864])\n", + "deriv_tensor (40, 81), indices tensor([2544, 2072, 2786, ..., 1377, 2375, 133])\n", + "deriv_tensor (40, 81), indices tensor([2544, 2072, 2786, ..., 1377, 2375, 133])\n", + "deriv_tensor (40, 81), indices tensor([2544, 2072, 2786, ..., 1377, 2375, 133])\n", + "deriv_tensor (40, 81), indices tensor([2544, 2072, 2786, ..., 1377, 2375, 133])\n", + "deriv_tensor (40, 81), indices tensor([ 694, 1031, 1552, ..., 1345, 3022, 1570])\n", + "deriv_tensor (40, 81), indices tensor([ 694, 1031, 1552, ..., 1345, 3022, 1570])\n", + "deriv_tensor (40, 81), indices tensor([ 694, 1031, 1552, ..., 1345, 3022, 1570])\n", + "deriv_tensor (40, 81), indices tensor([ 694, 1031, 1552, ..., 1345, 3022, 1570])\n", + "deriv_tensor (40, 81), indices tensor([2132, 2097, 2531, ..., 
184, 350, 506])\n", + "deriv_tensor (40, 81), indices tensor([2132, 2097, 2531, ..., 184, 350, 506])\n", + "deriv_tensor (40, 81), indices tensor([2132, 2097, 2531, ..., 184, 350, 506])\n", + "deriv_tensor (40, 81), indices tensor([2132, 2097, 2531, ..., 184, 350, 506])\n", + "deriv_tensor (40, 81), indices tensor([1938, 7, 521, ..., 2214, 2561, 3207])\n", + "deriv_tensor (40, 81), indices tensor([1938, 7, 521, ..., 2214, 2561, 3207])\n", + "deriv_tensor (40, 81), indices tensor([1938, 7, 521, ..., 2214, 2561, 3207])\n", + "deriv_tensor (40, 81), indices tensor([1938, 7, 521, ..., 2214, 2561, 3207])\n", + "deriv_tensor (40, 81), indices tensor([1302, 1410, 485, ..., 248, 621, 1901])\n", + "deriv_tensor (40, 81), indices tensor([1302, 1410, 485, ..., 248, 621, 1901])\n", + "deriv_tensor (40, 81), indices tensor([1302, 1410, 485, ..., 248, 621, 1901])\n", + "deriv_tensor (40, 81), indices tensor([1302, 1410, 485, ..., 248, 621, 1901])\n", + "deriv_tensor (40, 81), indices tensor([1167, 1234, 1716, ..., 2843, 183, 1629])\n", + "deriv_tensor (40, 81), indices tensor([1167, 1234, 1716, ..., 2843, 183, 1629])\n", + "deriv_tensor (40, 81), indices tensor([1167, 1234, 1716, ..., 2843, 183, 1629])\n", + "deriv_tensor (40, 81), indices tensor([1167, 1234, 1716, ..., 2843, 183, 1629])\n", + "deriv_tensor (40, 81), indices tensor([2959, 1149, 2404, ..., 819, 2337, 2252])\n", + "deriv_tensor (40, 81), indices tensor([2959, 1149, 2404, ..., 819, 2337, 2252])\n", + "deriv_tensor (40, 81), indices tensor([2959, 1149, 2404, ..., 819, 2337, 2252])\n", + "deriv_tensor (40, 81), indices tensor([2959, 1149, 2404, ..., 819, 2337, 2252])\n", + "deriv_tensor (40, 81), indices tensor([2591, 2416, 1119, ..., 3090, 1605, 2975])\n", + "deriv_tensor (40, 81), indices tensor([2591, 2416, 1119, ..., 3090, 1605, 2975])\n", + "deriv_tensor (40, 81), indices tensor([2591, 2416, 1119, ..., 3090, 1605, 2975])\n", + "deriv_tensor (40, 81), indices tensor([2591, 2416, 1119, ..., 3090, 1605, 2975])\n", + "deriv_tensor (40, 81), indices tensor([ 56, 1708, 1594, ..., 2438, 2371, 1831])\n", + "deriv_tensor (40, 81), indices tensor([ 56, 1708, 1594, ..., 2438, 2371, 1831])\n", + "deriv_tensor (40, 81), indices tensor([ 56, 1708, 1594, ..., 2438, 2371, 1831])\n", + "deriv_tensor (40, 81), indices tensor([ 56, 1708, 1594, ..., 2438, 2371, 1831])\n", + "deriv_tensor (40, 81), indices tensor([ 910, 1464, 1722, ..., 1861, 1661, 3094])\n", + "deriv_tensor (40, 81), indices tensor([ 910, 1464, 1722, ..., 1861, 1661, 3094])\n", + "deriv_tensor (40, 81), indices tensor([ 910, 1464, 1722, ..., 1861, 1661, 3094])\n", + "deriv_tensor (40, 81), indices tensor([ 910, 1464, 1722, ..., 1861, 1661, 3094])\n", + "deriv_tensor (40, 81), indices tensor([2954, 3069, 2069, ..., 2981, 81, 2150])\n", + "deriv_tensor (40, 81), indices tensor([2954, 3069, 2069, ..., 2981, 81, 2150])\n", + "deriv_tensor (40, 81), indices tensor([2954, 3069, 2069, ..., 2981, 81, 2150])\n", + "deriv_tensor (40, 81), indices tensor([2954, 3069, 2069, ..., 2981, 81, 2150])\n", + "deriv_tensor (40, 81), indices tensor([2661, 2082, 2616, ..., 2709, 2214, 785])\n", + "deriv_tensor (40, 81), indices tensor([2661, 2082, 2616, ..., 2709, 2214, 785])\n", + "deriv_tensor (40, 81), indices tensor([2661, 2082, 2616, ..., 2709, 2214, 785])\n", + "deriv_tensor (40, 81), indices tensor([2661, 2082, 2616, ..., 2709, 2214, 785])\n", + "deriv_tensor (40, 81), indices tensor([2335, 438, 2410, ..., 1657, 1910, 2552])\n", + "deriv_tensor (40, 81), indices tensor([2335, 438, 2410, ..., 1657, 1910, 
2552])\n", + "deriv_tensor (40, 81), indices tensor([2335, 438, 2410, ..., 1657, 1910, 2552])\n", + "deriv_tensor (40, 81), indices tensor([2335, 438, 2410, ..., 1657, 1910, 2552])\n", + "deriv_tensor (40, 81), indices tensor([ 440, 3067, 3215, ..., 3134, 2523, 653])\n", + "deriv_tensor (40, 81), indices tensor([ 440, 3067, 3215, ..., 3134, 2523, 653])\n", + "deriv_tensor (40, 81), indices tensor([ 440, 3067, 3215, ..., 3134, 2523, 653])\n", + "deriv_tensor (40, 81), indices tensor([ 440, 3067, 3215, ..., 3134, 2523, 653])\n", + "deriv_tensor (40, 81), indices tensor([2721, 235, 543, ..., 1240, 2849, 3164])\n", + "deriv_tensor (40, 81), indices tensor([2721, 235, 543, ..., 1240, 2849, 3164])\n", + "deriv_tensor (40, 81), indices tensor([2721, 235, 543, ..., 1240, 2849, 3164])\n", + "deriv_tensor (40, 81), indices tensor([2721, 235, 543, ..., 1240, 2849, 3164])\n", + "deriv_tensor (40, 81), indices tensor([2006, 958, 2468, ..., 2616, 918, 2517])\n", + "deriv_tensor (40, 81), indices tensor([2006, 958, 2468, ..., 2616, 918, 2517])\n", + "deriv_tensor (40, 81), indices tensor([2006, 958, 2468, ..., 2616, 918, 2517])\n", + "deriv_tensor (40, 81), indices tensor([2006, 958, 2468, ..., 2616, 918, 2517])\n", + "deriv_tensor (40, 81), indices tensor([ 647, 1275, 742, ..., 877, 328, 3091])\n", + "deriv_tensor (40, 81), indices tensor([ 647, 1275, 742, ..., 877, 328, 3091])\n", + "deriv_tensor (40, 81), indices tensor([ 647, 1275, 742, ..., 877, 328, 3091])\n", + "deriv_tensor (40, 81), indices tensor([ 647, 1275, 742, ..., 877, 328, 3091])\n", + "deriv_tensor (40, 81), indices tensor([1239, 2146, 1310, ..., 2637, 2386, 2046])\n", + "deriv_tensor (40, 81), indices tensor([1239, 2146, 1310, ..., 2637, 2386, 2046])\n", + "deriv_tensor (40, 81), indices tensor([1239, 2146, 1310, ..., 2637, 2386, 2046])\n", + "deriv_tensor (40, 81), indices tensor([1239, 2146, 1310, ..., 2637, 2386, 2046])\n", + "deriv_tensor (40, 81), indices tensor([1796, 2112, 1183, ..., 762, 1506, 972])\n", + "deriv_tensor (40, 81), indices tensor([1796, 2112, 1183, ..., 762, 1506, 972])\n", + "deriv_tensor (40, 81), indices tensor([1796, 2112, 1183, ..., 762, 1506, 972])\n", + "deriv_tensor (40, 81), indices tensor([1796, 2112, 1183, ..., 762, 1506, 972])\n", + "deriv_tensor (40, 81), indices tensor([ 664, 1353, 509, ..., 328, 156, 496])\n", + "deriv_tensor (40, 81), indices tensor([ 664, 1353, 509, ..., 328, 156, 496])\n", + "deriv_tensor (40, 81), indices tensor([ 664, 1353, 509, ..., 328, 156, 496])\n", + "deriv_tensor (40, 81), indices tensor([ 664, 1353, 509, ..., 328, 156, 496])\n", + "deriv_tensor (40, 81), indices tensor([2070, 945, 1536, ..., 3236, 1458, 804])\n", + "deriv_tensor (40, 81), indices tensor([2070, 945, 1536, ..., 3236, 1458, 804])\n", + "deriv_tensor (40, 81), indices tensor([2070, 945, 1536, ..., 3236, 1458, 804])\n", + "deriv_tensor (40, 81), indices tensor([2070, 945, 1536, ..., 3236, 1458, 804])\n", + "deriv_tensor (40, 81), indices tensor([1977, 2960, 3167, ..., 3098, 29, 2545])\n", + "deriv_tensor (40, 81), indices tensor([1977, 2960, 3167, ..., 3098, 29, 2545])\n", + "deriv_tensor (40, 81), indices tensor([1977, 2960, 3167, ..., 3098, 29, 2545])\n", + "deriv_tensor (40, 81), indices tensor([1977, 2960, 3167, ..., 3098, 29, 2545])\n", + "deriv_tensor (40, 81), indices tensor([ 148, 2543, 2246, ..., 1699, 2892, 2629])\n", + "deriv_tensor (40, 81), indices tensor([ 148, 2543, 2246, ..., 1699, 2892, 2629])\n", + "deriv_tensor (40, 81), indices tensor([ 148, 2543, 2246, ..., 1699, 2892, 2629])\n", + 
"deriv_tensor (40, 81), indices tensor([ 148, 2543, 2246, ..., 1699, 2892, 2629])\n", + "deriv_tensor (40, 81), indices tensor([1889, 2696, 1567, ..., 517, 2844, 768])\n", + "deriv_tensor (40, 81), indices tensor([1889, 2696, 1567, ..., 517, 2844, 768])\n", + "deriv_tensor (40, 81), indices tensor([1889, 2696, 1567, ..., 517, 2844, 768])\n", + "deriv_tensor (40, 81), indices tensor([1889, 2696, 1567, ..., 517, 2844, 768])\n", + "deriv_tensor (40, 81), indices tensor([1630, 327, 1337, ..., 587, 2874, 2337])\n", + "deriv_tensor (40, 81), indices tensor([1630, 327, 1337, ..., 587, 2874, 2337])\n", + "deriv_tensor (40, 81), indices tensor([1630, 327, 1337, ..., 587, 2874, 2337])\n", + "deriv_tensor (40, 81), indices tensor([1630, 327, 1337, ..., 587, 2874, 2337])\n", + "deriv_tensor (40, 81), indices tensor([ 96, 486, 927, ..., 2438, 521, 3056])\n", + "deriv_tensor (40, 81), indices tensor([ 96, 486, 927, ..., 2438, 521, 3056])\n", + "deriv_tensor (40, 81), indices tensor([ 96, 486, 927, ..., 2438, 521, 3056])\n", + "deriv_tensor (40, 81), indices tensor([ 96, 486, 927, ..., 2438, 521, 3056])\n", + "deriv_tensor (40, 81), indices tensor([1557, 36, 3167, ..., 1148, 3014, 1573])\n", + "deriv_tensor (40, 81), indices tensor([1557, 36, 3167, ..., 1148, 3014, 1573])\n", + "deriv_tensor (40, 81), indices tensor([1557, 36, 3167, ..., 1148, 3014, 1573])\n", + "deriv_tensor (40, 81), indices tensor([1557, 36, 3167, ..., 1148, 3014, 1573])\n", + "deriv_tensor (40, 81), indices tensor([1118, 604, 1963, ..., 2344, 3213, 1976])\n", + "deriv_tensor (40, 81), indices tensor([1118, 604, 1963, ..., 2344, 3213, 1976])\n", + "deriv_tensor (40, 81), indices tensor([1118, 604, 1963, ..., 2344, 3213, 1976])\n", + "deriv_tensor (40, 81), indices tensor([1118, 604, 1963, ..., 2344, 3213, 1976])\n", + "deriv_tensor (40, 81), indices tensor([1266, 3236, 2380, ..., 3168, 782, 1000])\n", + "deriv_tensor (40, 81), indices tensor([1266, 3236, 2380, ..., 3168, 782, 1000])\n", + "deriv_tensor (40, 81), indices tensor([1266, 3236, 2380, ..., 3168, 782, 1000])\n", + "deriv_tensor (40, 81), indices tensor([1266, 3236, 2380, ..., 3168, 782, 1000])\n", + "deriv_tensor (40, 81), indices tensor([2882, 1363, 1903, ..., 439, 850, 2026])\n", + "deriv_tensor (40, 81), indices tensor([2882, 1363, 1903, ..., 439, 850, 2026])\n", + "deriv_tensor (40, 81), indices tensor([2882, 1363, 1903, ..., 439, 850, 2026])\n", + "deriv_tensor (40, 81), indices tensor([2882, 1363, 1903, ..., 439, 850, 2026])\n", + "deriv_tensor (40, 81), indices tensor([2551, 3169, 2619, ..., 657, 1413, 1069])\n", + "deriv_tensor (40, 81), indices tensor([2551, 3169, 2619, ..., 657, 1413, 1069])\n", + "deriv_tensor (40, 81), indices tensor([2551, 3169, 2619, ..., 657, 1413, 1069])\n", + "deriv_tensor (40, 81), indices tensor([2551, 3169, 2619, ..., 657, 1413, 1069])\n", + "deriv_tensor (40, 81), indices tensor([2886, 2938, 248, ..., 758, 532, 2899])\n", + "deriv_tensor (40, 81), indices tensor([2886, 2938, 248, ..., 758, 532, 2899])\n", + "deriv_tensor (40, 81), indices tensor([2886, 2938, 248, ..., 758, 532, 2899])\n", + "deriv_tensor (40, 81), indices tensor([2886, 2938, 248, ..., 758, 532, 2899])\n", + "deriv_tensor (40, 81), indices tensor([2211, 119, 218, ..., 625, 1366, 2006])\n", + "deriv_tensor (40, 81), indices tensor([2211, 119, 218, ..., 625, 1366, 2006])\n", + "deriv_tensor (40, 81), indices tensor([2211, 119, 218, ..., 625, 1366, 2006])\n", + "deriv_tensor (40, 81), indices tensor([2211, 119, 218, ..., 625, 1366, 2006])\n", + "deriv_tensor (40, 81), 
indices tensor([1499, 4, 1117, ..., 255, 2011, 2599])\n", + "deriv_tensor (40, 81), indices tensor([1499, 4, 1117, ..., 255, 2011, 2599])\n", + "deriv_tensor (40, 81), indices tensor([1499, 4, 1117, ..., 255, 2011, 2599])\n", + "deriv_tensor (40, 81), indices tensor([1499, 4, 1117, ..., 255, 2011, 2599])\n", + "deriv_tensor (40, 81), indices tensor([1695, 762, 1972, ..., 2073, 2803, 686])\n", + "deriv_tensor (40, 81), indices tensor([1695, 762, 1972, ..., 2073, 2803, 686])\n", + "deriv_tensor (40, 81), indices tensor([1695, 762, 1972, ..., 2073, 2803, 686])\n", + "deriv_tensor (40, 81), indices tensor([1695, 762, 1972, ..., 2073, 2803, 686])\n", + "deriv_tensor (40, 81), indices tensor([ 618, 366, 2149, ..., 2339, 275, 1025])\n", + "deriv_tensor (40, 81), indices tensor([ 618, 366, 2149, ..., 2339, 275, 1025])\n", + "deriv_tensor (40, 81), indices tensor([ 618, 366, 2149, ..., 2339, 275, 1025])\n", + "deriv_tensor (40, 81), indices tensor([ 618, 366, 2149, ..., 2339, 275, 1025])\n", + "deriv_tensor (40, 81), indices tensor([2138, 1138, 2493, ..., 2121, 1799, 2011])\n", + "deriv_tensor (40, 81), indices tensor([2138, 1138, 2493, ..., 2121, 1799, 2011])\n", + "deriv_tensor (40, 81), indices tensor([2138, 1138, 2493, ..., 2121, 1799, 2011])\n", + "deriv_tensor (40, 81), indices tensor([2138, 1138, 2493, ..., 2121, 1799, 2011])\n", + "deriv_tensor (40, 81), indices tensor([ 980, 2881, 823, ..., 2215, 1954, 558])\n", + "deriv_tensor (40, 81), indices tensor([ 980, 2881, 823, ..., 2215, 1954, 558])\n", + "deriv_tensor (40, 81), indices tensor([ 980, 2881, 823, ..., 2215, 1954, 558])\n", + "deriv_tensor (40, 81), indices tensor([ 980, 2881, 823, ..., 2215, 1954, 558])\n", + "deriv_tensor (40, 81), indices tensor([2803, 1248, 2277, ..., 3128, 2171, 1911])\n", + "deriv_tensor (40, 81), indices tensor([2803, 1248, 2277, ..., 3128, 2171, 1911])\n", + "deriv_tensor (40, 81), indices tensor([2803, 1248, 2277, ..., 3128, 2171, 1911])\n", + "deriv_tensor (40, 81), indices tensor([2803, 1248, 2277, ..., 3128, 2171, 1911])\n", + "deriv_tensor (40, 81), indices tensor([2358, 475, 1546, ..., 726, 3224, 359])\n", + "deriv_tensor (40, 81), indices tensor([2358, 475, 1546, ..., 726, 3224, 359])\n", + "deriv_tensor (40, 81), indices tensor([2358, 475, 1546, ..., 726, 3224, 359])\n", + "deriv_tensor (40, 81), indices tensor([2358, 475, 1546, ..., 726, 3224, 359])\n", + "deriv_tensor (40, 81), indices tensor([ 849, 187, 3117, ..., 2529, 1498, 1346])\n", + "deriv_tensor (40, 81), indices tensor([ 849, 187, 3117, ..., 2529, 1498, 1346])\n", + "deriv_tensor (40, 81), indices tensor([ 849, 187, 3117, ..., 2529, 1498, 1346])\n", + "deriv_tensor (40, 81), indices tensor([ 849, 187, 3117, ..., 2529, 1498, 1346])\n", + "deriv_tensor (40, 81), indices tensor([2117, 1550, 3037, ..., 2942, 951, 1180])\n", + "deriv_tensor (40, 81), indices tensor([2117, 1550, 3037, ..., 2942, 951, 1180])\n", + "deriv_tensor (40, 81), indices tensor([2117, 1550, 3037, ..., 2942, 951, 1180])\n", + "deriv_tensor (40, 81), indices tensor([2117, 1550, 3037, ..., 2942, 951, 1180])\n", + "deriv_tensor (40, 81), indices tensor([ 305, 1031, 1946, ..., 2164, 1735, 665])\n", + "deriv_tensor (40, 81), indices tensor([ 305, 1031, 1946, ..., 2164, 1735, 665])\n", + "deriv_tensor (40, 81), indices tensor([ 305, 1031, 1946, ..., 2164, 1735, 665])\n", + "deriv_tensor (40, 81), indices tensor([ 305, 1031, 1946, ..., 2164, 1735, 665])\n", + "deriv_tensor (40, 81), indices tensor([2138, 3164, 244, ..., 2294, 1057, 3011])\n", + "deriv_tensor (40, 81), indices 
tensor([2138, 3164, 244, ..., 2294, 1057, 3011])\n", + "deriv_tensor (40, 81), indices tensor([2138, 3164, 244, ..., 2294, 1057, 3011])\n", + "deriv_tensor (40, 81), indices tensor([2138, 3164, 244, ..., 2294, 1057, 3011])\n", + "deriv_tensor (40, 81), indices tensor([ 883, 3228, 2005, ..., 51, 2543, 1184])\n", + "deriv_tensor (40, 81), indices tensor([ 883, 3228, 2005, ..., 51, 2543, 1184])\n", + "deriv_tensor (40, 81), indices tensor([ 883, 3228, 2005, ..., 51, 2543, 1184])\n", + "deriv_tensor (40, 81), indices tensor([ 883, 3228, 2005, ..., 51, 2543, 1184])\n", + "deriv_tensor (40, 81), indices tensor([ 140, 1722, 1656, ..., 2699, 145, 2083])\n", + "deriv_tensor (40, 81), indices tensor([ 140, 1722, 1656, ..., 2699, 145, 2083])\n", + "deriv_tensor (40, 81), indices tensor([ 140, 1722, 1656, ..., 2699, 145, 2083])\n", + "deriv_tensor (40, 81), indices tensor([ 140, 1722, 1656, ..., 2699, 145, 2083])\n", + "deriv_tensor (40, 81), indices tensor([3050, 251, 2861, ..., 186, 2483, 1296])\n", + "deriv_tensor (40, 81), indices tensor([3050, 251, 2861, ..., 186, 2483, 1296])\n", + "deriv_tensor (40, 81), indices tensor([3050, 251, 2861, ..., 186, 2483, 1296])\n", + "deriv_tensor (40, 81), indices tensor([3050, 251, 2861, ..., 186, 2483, 1296])\n", + "deriv_tensor (40, 81), indices tensor([3094, 1349, 2759, ..., 611, 1329, 1648])\n", + "deriv_tensor (40, 81), indices tensor([3094, 1349, 2759, ..., 611, 1329, 1648])\n", + "deriv_tensor (40, 81), indices tensor([3094, 1349, 2759, ..., 611, 1329, 1648])\n", + "deriv_tensor (40, 81), indices tensor([3094, 1349, 2759, ..., 611, 1329, 1648])\n", + "deriv_tensor (40, 81), indices tensor([ 138, 2138, 2298, ..., 973, 580, 51])\n", + "deriv_tensor (40, 81), indices tensor([ 138, 2138, 2298, ..., 973, 580, 51])\n", + "deriv_tensor (40, 81), indices tensor([ 138, 2138, 2298, ..., 973, 580, 51])\n", + "deriv_tensor (40, 81), indices tensor([ 138, 2138, 2298, ..., 973, 580, 51])\n", + "deriv_tensor (40, 81), indices tensor([ 184, 1224, 3170, ..., 268, 541, 327])\n", + "deriv_tensor (40, 81), indices tensor([ 184, 1224, 3170, ..., 268, 541, 327])\n", + "deriv_tensor (40, 81), indices tensor([ 184, 1224, 3170, ..., 268, 541, 327])\n", + "deriv_tensor (40, 81), indices tensor([ 184, 1224, 3170, ..., 268, 541, 327])\n", + "deriv_tensor (40, 81), indices tensor([1225, 533, 2771, ..., 1549, 3176, 1073])\n", + "deriv_tensor (40, 81), indices tensor([1225, 533, 2771, ..., 1549, 3176, 1073])\n", + "deriv_tensor (40, 81), indices tensor([1225, 533, 2771, ..., 1549, 3176, 1073])\n", + "deriv_tensor (40, 81), indices tensor([1225, 533, 2771, ..., 1549, 3176, 1073])\n", + "deriv_tensor (40, 81), indices tensor([1911, 2995, 99, ..., 1943, 1153, 2382])\n", + "deriv_tensor (40, 81), indices tensor([1911, 2995, 99, ..., 1943, 1153, 2382])\n", + "deriv_tensor (40, 81), indices tensor([1911, 2995, 99, ..., 1943, 1153, 2382])\n", + "deriv_tensor (40, 81), indices tensor([1911, 2995, 99, ..., 1943, 1153, 2382])\n", + "deriv_tensor (40, 81), indices tensor([ 655, 895, 2505, ..., 721, 2747, 2133])\n", + "deriv_tensor (40, 81), indices tensor([ 655, 895, 2505, ..., 721, 2747, 2133])\n", + "deriv_tensor (40, 81), indices tensor([ 655, 895, 2505, ..., 721, 2747, 2133])\n", + "deriv_tensor (40, 81), indices tensor([ 655, 895, 2505, ..., 721, 2747, 2133])\n", + "deriv_tensor (40, 81), indices tensor([ 802, 443, 1659, ..., 1060, 867, 3078])\n", + "deriv_tensor (40, 81), indices tensor([ 802, 443, 1659, ..., 1060, 867, 3078])\n", + "deriv_tensor (40, 81), indices tensor([ 802, 443, 1659, 
..., 1060, 867, 3078])\n", + "deriv_tensor (40, 81), indices tensor([ 802, 443, 1659, ..., 1060, 867, 3078])\n", + "deriv_tensor (40, 81), indices tensor([ 698, 175, 2336, ..., 1744, 3234, 200])\n", + "deriv_tensor (40, 81), indices tensor([ 698, 175, 2336, ..., 1744, 3234, 200])\n", + "deriv_tensor (40, 81), indices tensor([ 698, 175, 2336, ..., 1744, 3234, 200])\n", + "deriv_tensor (40, 81), indices tensor([ 698, 175, 2336, ..., 1744, 3234, 200])\n", + "deriv_tensor (40, 81), indices tensor([2128, 2594, 2508, ..., 2851, 25, 638])\n", + "deriv_tensor (40, 81), indices tensor([2128, 2594, 2508, ..., 2851, 25, 638])\n", + "deriv_tensor (40, 81), indices tensor([2128, 2594, 2508, ..., 2851, 25, 638])\n", + "deriv_tensor (40, 81), indices tensor([2128, 2594, 2508, ..., 2851, 25, 638])\n", + "deriv_tensor (40, 81), indices tensor([2361, 3183, 1547, ..., 362, 2550, 760])\n", + "deriv_tensor (40, 81), indices tensor([2361, 3183, 1547, ..., 362, 2550, 760])\n", + "deriv_tensor (40, 81), indices tensor([2361, 3183, 1547, ..., 362, 2550, 760])\n", + "deriv_tensor (40, 81), indices tensor([2361, 3183, 1547, ..., 362, 2550, 760])\n", + "deriv_tensor (40, 81), indices tensor([ 737, 2336, 159, ..., 2987, 733, 815])\n", + "deriv_tensor (40, 81), indices tensor([ 737, 2336, 159, ..., 2987, 733, 815])\n", + "deriv_tensor (40, 81), indices tensor([ 737, 2336, 159, ..., 2987, 733, 815])\n", + "deriv_tensor (40, 81), indices tensor([ 737, 2336, 159, ..., 2987, 733, 815])\n", + "deriv_tensor (40, 81), indices tensor([2657, 1763, 3176, ..., 1813, 2470, 1295])\n", + "deriv_tensor (40, 81), indices tensor([2657, 1763, 3176, ..., 1813, 2470, 1295])\n", + "deriv_tensor (40, 81), indices tensor([2657, 1763, 3176, ..., 1813, 2470, 1295])\n", + "deriv_tensor (40, 81), indices tensor([2657, 1763, 3176, ..., 1813, 2470, 1295])\n", + "deriv_tensor (40, 81), indices tensor([ 894, 1891, 1386, ..., 2512, 1471, 654])\n", + "deriv_tensor (40, 81), indices tensor([ 894, 1891, 1386, ..., 2512, 1471, 654])\n", + "deriv_tensor (40, 81), indices tensor([ 894, 1891, 1386, ..., 2512, 1471, 654])\n", + "deriv_tensor (40, 81), indices tensor([ 894, 1891, 1386, ..., 2512, 1471, 654])\n", + "deriv_tensor (40, 81), indices tensor([1433, 1815, 538, ..., 322, 1987, 2719])\n", + "deriv_tensor (40, 81), indices tensor([1433, 1815, 538, ..., 322, 1987, 2719])\n", + "deriv_tensor (40, 81), indices tensor([1433, 1815, 538, ..., 322, 1987, 2719])\n", + "deriv_tensor (40, 81), indices tensor([1433, 1815, 538, ..., 322, 1987, 2719])\n", + "deriv_tensor (40, 81), indices tensor([ 832, 2503, 1452, ..., 3166, 2541, 2727])\n", + "deriv_tensor (40, 81), indices tensor([ 832, 2503, 1452, ..., 3166, 2541, 2727])\n", + "deriv_tensor (40, 81), indices tensor([ 832, 2503, 1452, ..., 3166, 2541, 2727])\n", + "deriv_tensor (40, 81), indices tensor([ 832, 2503, 1452, ..., 3166, 2541, 2727])\n", + "deriv_tensor (40, 81), indices tensor([2371, 1810, 1657, ..., 1425, 2166, 3006])\n", + "deriv_tensor (40, 81), indices tensor([2371, 1810, 1657, ..., 1425, 2166, 3006])\n", + "deriv_tensor (40, 81), indices tensor([2371, 1810, 1657, ..., 1425, 2166, 3006])\n", + "deriv_tensor (40, 81), indices tensor([2371, 1810, 1657, ..., 1425, 2166, 3006])\n", + "deriv_tensor (40, 81), indices tensor([2940, 764, 2186, ..., 945, 1724, 1859])\n", + "deriv_tensor (40, 81), indices tensor([2940, 764, 2186, ..., 945, 1724, 1859])\n", + "deriv_tensor (40, 81), indices tensor([2940, 764, 2186, ..., 945, 1724, 1859])\n", + "deriv_tensor (40, 81), indices tensor([2940, 764, 2186, ..., 
945, 1724, 1859])\n", + "deriv_tensor (40, 81), indices tensor([1120, 1615, 293, ..., 1521, 2391, 397])\n", + "deriv_tensor (40, 81), indices tensor([1120, 1615, 293, ..., 1521, 2391, 397])\n", + "deriv_tensor (40, 81), indices tensor([1120, 1615, 293, ..., 1521, 2391, 397])\n", + "deriv_tensor (40, 81), indices tensor([1120, 1615, 293, ..., 1521, 2391, 397])\n", + "deriv_tensor (40, 81), indices tensor([2282, 769, 972, ..., 2069, 368, 2541])\n", + "deriv_tensor (40, 81), indices tensor([2282, 769, 972, ..., 2069, 368, 2541])\n", + "deriv_tensor (40, 81), indices tensor([2282, 769, 972, ..., 2069, 368, 2541])\n", + "deriv_tensor (40, 81), indices tensor([2282, 769, 972, ..., 2069, 368, 2541])\n", + "deriv_tensor (40, 81), indices tensor([ 70, 2747, 3145, ..., 1626, 1522, 917])\n", + "deriv_tensor (40, 81), indices tensor([ 70, 2747, 3145, ..., 1626, 1522, 917])\n", + "deriv_tensor (40, 81), indices tensor([ 70, 2747, 3145, ..., 1626, 1522, 917])\n", + "deriv_tensor (40, 81), indices tensor([ 70, 2747, 3145, ..., 1626, 1522, 917])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 2198, 1757, ..., 2387, 1378, 1273])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 2198, 1757, ..., 2387, 1378, 1273])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 2198, 1757, ..., 2387, 1378, 1273])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 2198, 1757, ..., 2387, 1378, 1273])\n", + "deriv_tensor (40, 81), indices tensor([ 479, 1934, 1886, ..., 2809, 445, 1795])\n", + "deriv_tensor (40, 81), indices tensor([ 479, 1934, 1886, ..., 2809, 445, 1795])\n", + "deriv_tensor (40, 81), indices tensor([ 479, 1934, 1886, ..., 2809, 445, 1795])\n", + "deriv_tensor (40, 81), indices tensor([ 479, 1934, 1886, ..., 2809, 445, 1795])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 2227, 1158, ..., 511, 1648, 102])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 2227, 1158, ..., 511, 1648, 102])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 2227, 1158, ..., 511, 1648, 102])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 2227, 1158, ..., 511, 1648, 102])\n", + "deriv_tensor (40, 81), indices tensor([1369, 727, 2860, ..., 1294, 126, 1780])\n", + "deriv_tensor (40, 81), indices tensor([1369, 727, 2860, ..., 1294, 126, 1780])\n", + "deriv_tensor (40, 81), indices tensor([1369, 727, 2860, ..., 1294, 126, 1780])\n", + "deriv_tensor (40, 81), indices tensor([1369, 727, 2860, ..., 1294, 126, 1780])\n", + "deriv_tensor (40, 81), indices tensor([2327, 1755, 2460, ..., 1385, 2767, 1745])\n", + "deriv_tensor (40, 81), indices tensor([2327, 1755, 2460, ..., 1385, 2767, 1745])\n", + "deriv_tensor (40, 81), indices tensor([2327, 1755, 2460, ..., 1385, 2767, 1745])\n", + "deriv_tensor (40, 81), indices tensor([2327, 1755, 2460, ..., 1385, 2767, 1745])\n", + "deriv_tensor (40, 81), indices tensor([1356, 1817, 559, ..., 73, 727, 3049])\n", + "deriv_tensor (40, 81), indices tensor([1356, 1817, 559, ..., 73, 727, 3049])\n", + "deriv_tensor (40, 81), indices tensor([1356, 1817, 559, ..., 73, 727, 3049])\n", + "deriv_tensor (40, 81), indices tensor([1356, 1817, 559, ..., 73, 727, 3049])\n", + "deriv_tensor (40, 81), indices tensor([ 256, 893, 2117, ..., 3236, 2158, 3108])\n", + "deriv_tensor (40, 81), indices tensor([ 256, 893, 2117, ..., 3236, 2158, 3108])\n", + "deriv_tensor (40, 81), indices tensor([ 256, 893, 2117, ..., 3236, 2158, 3108])\n", + "deriv_tensor (40, 81), indices tensor([ 256, 893, 2117, ..., 3236, 2158, 3108])\n", + "deriv_tensor (40, 81), indices tensor([ 708, 2056, 48, ..., 2075, 1578, 
360])\n", + "deriv_tensor (40, 81), indices tensor([ 708, 2056, 48, ..., 2075, 1578, 360])\n", + "deriv_tensor (40, 81), indices tensor([ 708, 2056, 48, ..., 2075, 1578, 360])\n", + "deriv_tensor (40, 81), indices tensor([ 708, 2056, 48, ..., 2075, 1578, 360])\n", + "deriv_tensor (40, 81), indices tensor([2441, 1309, 377, ..., 2648, 2999, 481])\n", + "deriv_tensor (40, 81), indices tensor([2441, 1309, 377, ..., 2648, 2999, 481])\n", + "deriv_tensor (40, 81), indices tensor([2441, 1309, 377, ..., 2648, 2999, 481])\n", + "deriv_tensor (40, 81), indices tensor([2441, 1309, 377, ..., 2648, 2999, 481])\n", + "deriv_tensor (40, 81), indices tensor([ 411, 2733, 3035, ..., 3046, 582, 2919])\n", + "deriv_tensor (40, 81), indices tensor([ 411, 2733, 3035, ..., 3046, 582, 2919])\n", + "deriv_tensor (40, 81), indices tensor([ 411, 2733, 3035, ..., 3046, 582, 2919])\n", + "deriv_tensor (40, 81), indices tensor([ 411, 2733, 3035, ..., 3046, 582, 2919])\n", + "deriv_tensor (40, 81), indices tensor([2605, 346, 1659, ..., 306, 670, 1862])\n", + "deriv_tensor (40, 81), indices tensor([2605, 346, 1659, ..., 306, 670, 1862])\n", + "deriv_tensor (40, 81), indices tensor([2605, 346, 1659, ..., 306, 670, 1862])\n", + "deriv_tensor (40, 81), indices tensor([2605, 346, 1659, ..., 306, 670, 1862])\n", + "deriv_tensor (40, 81), indices tensor([2815, 1891, 2071, ..., 1810, 1731, 137])\n", + "deriv_tensor (40, 81), indices tensor([2815, 1891, 2071, ..., 1810, 1731, 137])\n", + "deriv_tensor (40, 81), indices tensor([2815, 1891, 2071, ..., 1810, 1731, 137])\n", + "deriv_tensor (40, 81), indices tensor([2815, 1891, 2071, ..., 1810, 1731, 137])\n", + "deriv_tensor (40, 81), indices tensor([1012, 2054, 381, ..., 1346, 849, 1854])\n", + "deriv_tensor (40, 81), indices tensor([1012, 2054, 381, ..., 1346, 849, 1854])\n", + "deriv_tensor (40, 81), indices tensor([1012, 2054, 381, ..., 1346, 849, 1854])\n", + "deriv_tensor (40, 81), indices tensor([1012, 2054, 381, ..., 1346, 849, 1854])\n", + "deriv_tensor (40, 81), indices tensor([1671, 3034, 1450, ..., 2117, 1900, 1310])\n", + "deriv_tensor (40, 81), indices tensor([1671, 3034, 1450, ..., 2117, 1900, 1310])\n", + "deriv_tensor (40, 81), indices tensor([1671, 3034, 1450, ..., 2117, 1900, 1310])\n", + "deriv_tensor (40, 81), indices tensor([1671, 3034, 1450, ..., 2117, 1900, 1310])\n", + "deriv_tensor (40, 81), indices tensor([2729, 2885, 312, ..., 598, 3048, 550])\n", + "deriv_tensor (40, 81), indices tensor([2729, 2885, 312, ..., 598, 3048, 550])\n", + "deriv_tensor (40, 81), indices tensor([2729, 2885, 312, ..., 598, 3048, 550])\n", + "deriv_tensor (40, 81), indices tensor([2729, 2885, 312, ..., 598, 3048, 550])\n", + "deriv_tensor (40, 81), indices tensor([ 522, 766, 2877, ..., 78, 577, 33])\n", + "deriv_tensor (40, 81), indices tensor([ 522, 766, 2877, ..., 78, 577, 33])\n", + "deriv_tensor (40, 81), indices tensor([ 522, 766, 2877, ..., 78, 577, 33])\n", + "deriv_tensor (40, 81), indices tensor([ 522, 766, 2877, ..., 78, 577, 33])\n", + "deriv_tensor (40, 81), indices tensor([2388, 1772, 213, ..., 2225, 2115, 2618])\n", + "deriv_tensor (40, 81), indices tensor([2388, 1772, 213, ..., 2225, 2115, 2618])\n", + "deriv_tensor (40, 81), indices tensor([2388, 1772, 213, ..., 2225, 2115, 2618])\n", + "deriv_tensor (40, 81), indices tensor([2388, 1772, 213, ..., 2225, 2115, 2618])\n", + "deriv_tensor (40, 81), indices tensor([2873, 333, 854, ..., 3192, 3093, 170])\n", + "deriv_tensor (40, 81), indices tensor([2873, 333, 854, ..., 3192, 3093, 170])\n", + "deriv_tensor (40, 81), 
indices tensor([2873, 333, 854, ..., 3192, 3093, 170])\n", + "deriv_tensor (40, 81), indices tensor([2873, 333, 854, ..., 3192, 3093, 170])\n", + "deriv_tensor (40, 81), indices tensor([2159, 1078, 183, ..., 1906, 2158, 2479])\n", + "deriv_tensor (40, 81), indices tensor([2159, 1078, 183, ..., 1906, 2158, 2479])\n", + "deriv_tensor (40, 81), indices tensor([2159, 1078, 183, ..., 1906, 2158, 2479])\n", + "deriv_tensor (40, 81), indices tensor([2159, 1078, 183, ..., 1906, 2158, 2479])\n", + "deriv_tensor (40, 81), indices tensor([1682, 1983, 1538, ..., 688, 2350, 2498])\n", + "deriv_tensor (40, 81), indices tensor([1682, 1983, 1538, ..., 688, 2350, 2498])\n", + "deriv_tensor (40, 81), indices tensor([1682, 1983, 1538, ..., 688, 2350, 2498])\n", + "deriv_tensor (40, 81), indices tensor([1682, 1983, 1538, ..., 688, 2350, 2498])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 289, 2990, ..., 2445, 2043, 1549])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 289, 2990, ..., 2445, 2043, 1549])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 289, 2990, ..., 2445, 2043, 1549])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 289, 2990, ..., 2445, 2043, 1549])\n", + "deriv_tensor (40, 81), indices tensor([ 700, 537, 1965, ..., 36, 1291, 1824])\n", + "deriv_tensor (40, 81), indices tensor([ 700, 537, 1965, ..., 36, 1291, 1824])\n", + "deriv_tensor (40, 81), indices tensor([ 700, 537, 1965, ..., 36, 1291, 1824])\n", + "deriv_tensor (40, 81), indices tensor([ 700, 537, 1965, ..., 36, 1291, 1824])\n", + "deriv_tensor (40, 81), indices tensor([ 458, 137, 2981, ..., 188, 1346, 1156])\n", + "deriv_tensor (40, 81), indices tensor([ 458, 137, 2981, ..., 188, 1346, 1156])\n", + "deriv_tensor (40, 81), indices tensor([ 458, 137, 2981, ..., 188, 1346, 1156])\n", + "deriv_tensor (40, 81), indices tensor([ 458, 137, 2981, ..., 188, 1346, 1156])\n", + "deriv_tensor (40, 81), indices tensor([2621, 2557, 2712, ..., 842, 1227, 275])\n", + "deriv_tensor (40, 81), indices tensor([2621, 2557, 2712, ..., 842, 1227, 275])\n", + "deriv_tensor (40, 81), indices tensor([2621, 2557, 2712, ..., 842, 1227, 275])\n", + "deriv_tensor (40, 81), indices tensor([2621, 2557, 2712, ..., 842, 1227, 275])\n", + "deriv_tensor (40, 81), indices tensor([1382, 2224, 618, ..., 998, 1391, 1064])\n", + "deriv_tensor (40, 81), indices tensor([1382, 2224, 618, ..., 998, 1391, 1064])\n", + "deriv_tensor (40, 81), indices tensor([1382, 2224, 618, ..., 998, 1391, 1064])\n", + "deriv_tensor (40, 81), indices tensor([1382, 2224, 618, ..., 998, 1391, 1064])\n", + "deriv_tensor (40, 81), indices tensor([ 791, 662, 155, ..., 2500, 1565, 1800])\n", + "deriv_tensor (40, 81), indices tensor([ 791, 662, 155, ..., 2500, 1565, 1800])\n", + "deriv_tensor (40, 81), indices tensor([ 791, 662, 155, ..., 2500, 1565, 1800])\n", + "deriv_tensor (40, 81), indices tensor([ 791, 662, 155, ..., 2500, 1565, 1800])\n", + "deriv_tensor (40, 81), indices tensor([ 223, 1981, 2785, ..., 1539, 2361, 1232])\n", + "deriv_tensor (40, 81), indices tensor([ 223, 1981, 2785, ..., 1539, 2361, 1232])\n", + "deriv_tensor (40, 81), indices tensor([ 223, 1981, 2785, ..., 1539, 2361, 1232])\n", + "deriv_tensor (40, 81), indices tensor([ 223, 1981, 2785, ..., 1539, 2361, 1232])\n", + "deriv_tensor (40, 81), indices tensor([1032, 3173, 1740, ..., 1706, 1661, 2102])\n", + "deriv_tensor (40, 81), indices tensor([1032, 3173, 1740, ..., 1706, 1661, 2102])\n", + "deriv_tensor (40, 81), indices tensor([1032, 3173, 1740, ..., 1706, 1661, 2102])\n", + "deriv_tensor (40, 81), indices 
tensor([1032, 3173, 1740, ..., 1706, 1661, 2102])\n", + "deriv_tensor (40, 81), indices tensor([ 262, 2715, 3200, ..., 1761, 1127, 978])\n", + "deriv_tensor (40, 81), indices tensor([ 262, 2715, 3200, ..., 1761, 1127, 978])\n", + "deriv_tensor (40, 81), indices tensor([ 262, 2715, 3200, ..., 1761, 1127, 978])\n", + "deriv_tensor (40, 81), indices tensor([ 262, 2715, 3200, ..., 1761, 1127, 978])\n", + "deriv_tensor (40, 81), indices tensor([1065, 1926, 601, ..., 2433, 1428, 292])\n", + "deriv_tensor (40, 81), indices tensor([1065, 1926, 601, ..., 2433, 1428, 292])\n", + "deriv_tensor (40, 81), indices tensor([1065, 1926, 601, ..., 2433, 1428, 292])\n", + "deriv_tensor (40, 81), indices tensor([1065, 1926, 601, ..., 2433, 1428, 292])\n", + "deriv_tensor (40, 81), indices tensor([3143, 163, 2870, ..., 1788, 884, 1599])\n", + "deriv_tensor (40, 81), indices tensor([3143, 163, 2870, ..., 1788, 884, 1599])\n", + "deriv_tensor (40, 81), indices tensor([3143, 163, 2870, ..., 1788, 884, 1599])\n", + "deriv_tensor (40, 81), indices tensor([3143, 163, 2870, ..., 1788, 884, 1599])\n", + "deriv_tensor (40, 81), indices tensor([2675, 180, 2036, ..., 35, 3137, 2102])\n", + "deriv_tensor (40, 81), indices tensor([2675, 180, 2036, ..., 35, 3137, 2102])\n", + "deriv_tensor (40, 81), indices tensor([2675, 180, 2036, ..., 35, 3137, 2102])\n", + "deriv_tensor (40, 81), indices tensor([2675, 180, 2036, ..., 35, 3137, 2102])\n", + "deriv_tensor (40, 81), indices tensor([ 65, 2601, 1883, ..., 2871, 2730, 2610])\n", + "deriv_tensor (40, 81), indices tensor([ 65, 2601, 1883, ..., 2871, 2730, 2610])\n", + "deriv_tensor (40, 81), indices tensor([ 65, 2601, 1883, ..., 2871, 2730, 2610])\n", + "deriv_tensor (40, 81), indices tensor([ 65, 2601, 1883, ..., 2871, 2730, 2610])\n", + "deriv_tensor (40, 81), indices tensor([1229, 768, 1119, ..., 811, 2225, 2511])\n", + "deriv_tensor (40, 81), indices tensor([1229, 768, 1119, ..., 811, 2225, 2511])\n", + "deriv_tensor (40, 81), indices tensor([1229, 768, 1119, ..., 811, 2225, 2511])\n", + "deriv_tensor (40, 81), indices tensor([1229, 768, 1119, ..., 811, 2225, 2511])\n", + "deriv_tensor (40, 81), indices tensor([ 581, 2418, 2089, ..., 1757, 2324, 13])\n", + "deriv_tensor (40, 81), indices tensor([ 581, 2418, 2089, ..., 1757, 2324, 13])\n", + "deriv_tensor (40, 81), indices tensor([ 581, 2418, 2089, ..., 1757, 2324, 13])\n", + "deriv_tensor (40, 81), indices tensor([ 581, 2418, 2089, ..., 1757, 2324, 13])\n", + "deriv_tensor (40, 81), indices tensor([2073, 1578, 572, ..., 314, 1072, 141])\n", + "deriv_tensor (40, 81), indices tensor([2073, 1578, 572, ..., 314, 1072, 141])\n", + "deriv_tensor (40, 81), indices tensor([2073, 1578, 572, ..., 314, 1072, 141])\n", + "deriv_tensor (40, 81), indices tensor([2073, 1578, 572, ..., 314, 1072, 141])\n", + "deriv_tensor (40, 81), indices tensor([1550, 1641, 247, ..., 1366, 338, 2007])\n", + "deriv_tensor (40, 81), indices tensor([1550, 1641, 247, ..., 1366, 338, 2007])\n", + "deriv_tensor (40, 81), indices tensor([1550, 1641, 247, ..., 1366, 338, 2007])\n", + "deriv_tensor (40, 81), indices tensor([1550, 1641, 247, ..., 1366, 338, 2007])\n", + "deriv_tensor (40, 81), indices tensor([1991, 1360, 1719, ..., 3027, 328, 2304])\n", + "deriv_tensor (40, 81), indices tensor([1991, 1360, 1719, ..., 3027, 328, 2304])\n", + "deriv_tensor (40, 81), indices tensor([1991, 1360, 1719, ..., 3027, 328, 2304])\n", + "deriv_tensor (40, 81), indices tensor([1991, 1360, 1719, ..., 3027, 328, 2304])\n", + "deriv_tensor (40, 81), indices tensor([1876, 807, 
1790, ..., 2571, 1867, 2505])\n", + "deriv_tensor (40, 81), indices tensor([1876, 807, 1790, ..., 2571, 1867, 2505])\n", + "deriv_tensor (40, 81), indices tensor([1876, 807, 1790, ..., 2571, 1867, 2505])\n", + "deriv_tensor (40, 81), indices tensor([1876, 807, 1790, ..., 2571, 1867, 2505])\n", + "deriv_tensor (40, 81), indices tensor([ 497, 1921, 869, ..., 1170, 1873, 640])\n", + "deriv_tensor (40, 81), indices tensor([ 497, 1921, 869, ..., 1170, 1873, 640])\n", + "deriv_tensor (40, 81), indices tensor([ 497, 1921, 869, ..., 1170, 1873, 640])\n", + "deriv_tensor (40, 81), indices tensor([ 497, 1921, 869, ..., 1170, 1873, 640])\n", + "deriv_tensor (40, 81), indices tensor([1524, 2844, 1586, ..., 1520, 2718, 1548])\n", + "deriv_tensor (40, 81), indices tensor([1524, 2844, 1586, ..., 1520, 2718, 1548])\n", + "deriv_tensor (40, 81), indices tensor([1524, 2844, 1586, ..., 1520, 2718, 1548])\n", + "deriv_tensor (40, 81), indices tensor([1524, 2844, 1586, ..., 1520, 2718, 1548])\n", + "deriv_tensor (40, 81), indices tensor([2071, 1325, 1684, ..., 43, 1431, 1275])\n", + "deriv_tensor (40, 81), indices tensor([2071, 1325, 1684, ..., 43, 1431, 1275])\n", + "deriv_tensor (40, 81), indices tensor([2071, 1325, 1684, ..., 43, 1431, 1275])\n", + "deriv_tensor (40, 81), indices tensor([2071, 1325, 1684, ..., 43, 1431, 1275])\n", + "deriv_tensor (40, 81), indices tensor([ 271, 2044, 2325, ..., 905, 1158, 215])\n", + "deriv_tensor (40, 81), indices tensor([ 271, 2044, 2325, ..., 905, 1158, 215])\n", + "deriv_tensor (40, 81), indices tensor([ 271, 2044, 2325, ..., 905, 1158, 215])\n", + "deriv_tensor (40, 81), indices tensor([ 271, 2044, 2325, ..., 905, 1158, 215])\n", + "deriv_tensor (40, 81), indices tensor([1907, 2106, 2669, ..., 1511, 1221, 2568])\n", + "deriv_tensor (40, 81), indices tensor([1907, 2106, 2669, ..., 1511, 1221, 2568])\n", + "deriv_tensor (40, 81), indices tensor([1907, 2106, 2669, ..., 1511, 1221, 2568])\n", + "deriv_tensor (40, 81), indices tensor([1907, 2106, 2669, ..., 1511, 1221, 2568])\n", + "deriv_tensor (40, 81), indices tensor([1277, 1945, 2448, ..., 1856, 441, 836])\n", + "deriv_tensor (40, 81), indices tensor([1277, 1945, 2448, ..., 1856, 441, 836])\n", + "deriv_tensor (40, 81), indices tensor([1277, 1945, 2448, ..., 1856, 441, 836])\n", + "deriv_tensor (40, 81), indices tensor([1277, 1945, 2448, ..., 1856, 441, 836])\n", + "deriv_tensor (40, 81), indices tensor([ 929, 2162, 1603, ..., 1608, 1714, 1202])\n", + "deriv_tensor (40, 81), indices tensor([ 929, 2162, 1603, ..., 1608, 1714, 1202])\n", + "deriv_tensor (40, 81), indices tensor([ 929, 2162, 1603, ..., 1608, 1714, 1202])\n", + "deriv_tensor (40, 81), indices tensor([ 929, 2162, 1603, ..., 1608, 1714, 1202])\n", + "deriv_tensor (40, 81), indices tensor([1497, 817, 2830, ..., 3058, 2602, 471])\n", + "deriv_tensor (40, 81), indices tensor([1497, 817, 2830, ..., 3058, 2602, 471])\n", + "deriv_tensor (40, 81), indices tensor([1497, 817, 2830, ..., 3058, 2602, 471])\n", + "deriv_tensor (40, 81), indices tensor([1497, 817, 2830, ..., 3058, 2602, 471])\n", + "deriv_tensor (40, 81), indices tensor([1247, 245, 3134, ..., 1703, 207, 457])\n", + "deriv_tensor (40, 81), indices tensor([1247, 245, 3134, ..., 1703, 207, 457])\n", + "deriv_tensor (40, 81), indices tensor([1247, 245, 3134, ..., 1703, 207, 457])\n", + "deriv_tensor (40, 81), indices tensor([1247, 245, 3134, ..., 1703, 207, 457])\n", + "deriv_tensor (40, 81), indices tensor([ 548, 26, 1812, ..., 84, 1755, 245])\n", + "deriv_tensor (40, 81), indices tensor([ 548, 26, 1812, 
..., 84, 1755, 245])\n", + "deriv_tensor (40, 81), indices tensor([ 548, 26, 1812, ..., 84, 1755, 245])\n", + "deriv_tensor (40, 81), indices tensor([ 548, 26, 1812, ..., 84, 1755, 245])\n", + "deriv_tensor (40, 81), indices tensor([ 721, 1237, 253, ..., 1899, 1179, 972])\n", + "deriv_tensor (40, 81), indices tensor([ 721, 1237, 253, ..., 1899, 1179, 972])\n", + "deriv_tensor (40, 81), indices tensor([ 721, 1237, 253, ..., 1899, 1179, 972])\n", + "deriv_tensor (40, 81), indices tensor([ 721, 1237, 253, ..., 1899, 1179, 972])\n", + "deriv_tensor (40, 81), indices tensor([ 970, 29, 2054, ..., 3168, 810, 377])\n", + "deriv_tensor (40, 81), indices tensor([ 970, 29, 2054, ..., 3168, 810, 377])\n", + "deriv_tensor (40, 81), indices tensor([ 970, 29, 2054, ..., 3168, 810, 377])\n", + "deriv_tensor (40, 81), indices tensor([ 970, 29, 2054, ..., 3168, 810, 377])\n", + "deriv_tensor (40, 81), indices tensor([3077, 2023, 1623, ..., 3134, 774, 2954])\n", + "deriv_tensor (40, 81), indices tensor([3077, 2023, 1623, ..., 3134, 774, 2954])\n", + "deriv_tensor (40, 81), indices tensor([3077, 2023, 1623, ..., 3134, 774, 2954])\n", + "deriv_tensor (40, 81), indices tensor([3077, 2023, 1623, ..., 3134, 774, 2954])\n", + "deriv_tensor (40, 81), indices tensor([1793, 689, 1626, ..., 3025, 1943, 3049])\n", + "deriv_tensor (40, 81), indices tensor([1793, 689, 1626, ..., 3025, 1943, 3049])\n", + "deriv_tensor (40, 81), indices tensor([1793, 689, 1626, ..., 3025, 1943, 3049])\n", + "deriv_tensor (40, 81), indices tensor([1793, 689, 1626, ..., 3025, 1943, 3049])\n", + "deriv_tensor (40, 81), indices tensor([2230, 352, 2884, ..., 1358, 1035, 1452])\n", + "deriv_tensor (40, 81), indices tensor([2230, 352, 2884, ..., 1358, 1035, 1452])\n", + "deriv_tensor (40, 81), indices tensor([2230, 352, 2884, ..., 1358, 1035, 1452])\n", + "deriv_tensor (40, 81), indices tensor([2230, 352, 2884, ..., 1358, 1035, 1452])\n", + "deriv_tensor (40, 81), indices tensor([2441, 450, 105, ..., 1023, 1527, 44])\n", + "deriv_tensor (40, 81), indices tensor([2441, 450, 105, ..., 1023, 1527, 44])\n", + "deriv_tensor (40, 81), indices tensor([2441, 450, 105, ..., 1023, 1527, 44])\n", + "deriv_tensor (40, 81), indices tensor([2441, 450, 105, ..., 1023, 1527, 44])\n", + "deriv_tensor (40, 81), indices tensor([3134, 2151, 2121, ..., 1978, 392, 2029])\n", + "deriv_tensor (40, 81), indices tensor([3134, 2151, 2121, ..., 1978, 392, 2029])\n", + "deriv_tensor (40, 81), indices tensor([3134, 2151, 2121, ..., 1978, 392, 2029])\n", + "deriv_tensor (40, 81), indices tensor([3134, 2151, 2121, ..., 1978, 392, 2029])\n", + "deriv_tensor (40, 81), indices tensor([ 822, 429, 351, ..., 1210, 2812, 1371])\n", + "deriv_tensor (40, 81), indices tensor([ 822, 429, 351, ..., 1210, 2812, 1371])\n", + "deriv_tensor (40, 81), indices tensor([ 822, 429, 351, ..., 1210, 2812, 1371])\n", + "deriv_tensor (40, 81), indices tensor([ 822, 429, 351, ..., 1210, 2812, 1371])\n", + "deriv_tensor (40, 81), indices tensor([2476, 2276, 1963, ..., 2258, 503, 2832])\n", + "deriv_tensor (40, 81), indices tensor([2476, 2276, 1963, ..., 2258, 503, 2832])\n", + "deriv_tensor (40, 81), indices tensor([2476, 2276, 1963, ..., 2258, 503, 2832])\n", + "deriv_tensor (40, 81), indices tensor([2476, 2276, 1963, ..., 2258, 503, 2832])\n", + "deriv_tensor (40, 81), indices tensor([1937, 1390, 961, ..., 1787, 571, 2805])\n", + "deriv_tensor (40, 81), indices tensor([1937, 1390, 961, ..., 1787, 571, 2805])\n", + "deriv_tensor (40, 81), indices tensor([1937, 1390, 961, ..., 1787, 571, 2805])\n", + 
"deriv_tensor (40, 81), indices tensor([1937, 1390, 961, ..., 1787, 571, 2805])\n", + "deriv_tensor (40, 81), indices tensor([1240, 84, 2745, ..., 706, 2635, 21])\n", + "deriv_tensor (40, 81), indices tensor([1240, 84, 2745, ..., 706, 2635, 21])\n", + "deriv_tensor (40, 81), indices tensor([1240, 84, 2745, ..., 706, 2635, 21])\n", + "deriv_tensor (40, 81), indices tensor([1240, 84, 2745, ..., 706, 2635, 21])\n", + "deriv_tensor (40, 81), indices tensor([1335, 756, 2804, ..., 2439, 2627, 2614])\n", + "deriv_tensor (40, 81), indices tensor([1335, 756, 2804, ..., 2439, 2627, 2614])\n", + "deriv_tensor (40, 81), indices tensor([1335, 756, 2804, ..., 2439, 2627, 2614])\n", + "deriv_tensor (40, 81), indices tensor([1335, 756, 2804, ..., 2439, 2627, 2614])\n", + "deriv_tensor (40, 81), indices tensor([1201, 858, 43, ..., 1496, 562, 1410])\n", + "deriv_tensor (40, 81), indices tensor([1201, 858, 43, ..., 1496, 562, 1410])\n", + "deriv_tensor (40, 81), indices tensor([1201, 858, 43, ..., 1496, 562, 1410])\n", + "deriv_tensor (40, 81), indices tensor([1201, 858, 43, ..., 1496, 562, 1410])\n", + "deriv_tensor (40, 81), indices tensor([ 92, 3148, 1368, ..., 1982, 1675, 2501])\n", + "deriv_tensor (40, 81), indices tensor([ 92, 3148, 1368, ..., 1982, 1675, 2501])\n", + "deriv_tensor (40, 81), indices tensor([ 92, 3148, 1368, ..., 1982, 1675, 2501])\n", + "deriv_tensor (40, 81), indices tensor([ 92, 3148, 1368, ..., 1982, 1675, 2501])\n", + "deriv_tensor (40, 81), indices tensor([ 898, 3122, 594, ..., 2480, 474, 743])\n", + "deriv_tensor (40, 81), indices tensor([ 898, 3122, 594, ..., 2480, 474, 743])\n", + "deriv_tensor (40, 81), indices tensor([ 898, 3122, 594, ..., 2480, 474, 743])\n", + "deriv_tensor (40, 81), indices tensor([ 898, 3122, 594, ..., 2480, 474, 743])\n", + "deriv_tensor (40, 81), indices tensor([ 976, 1627, 1892, ..., 252, 2269, 2713])\n", + "deriv_tensor (40, 81), indices tensor([ 976, 1627, 1892, ..., 252, 2269, 2713])\n", + "deriv_tensor (40, 81), indices tensor([ 976, 1627, 1892, ..., 252, 2269, 2713])\n", + "deriv_tensor (40, 81), indices tensor([ 976, 1627, 1892, ..., 252, 2269, 2713])\n", + "deriv_tensor (40, 81), indices tensor([2761, 2152, 1853, ..., 412, 2308, 908])\n", + "deriv_tensor (40, 81), indices tensor([2761, 2152, 1853, ..., 412, 2308, 908])\n", + "deriv_tensor (40, 81), indices tensor([2761, 2152, 1853, ..., 412, 2308, 908])\n", + "deriv_tensor (40, 81), indices tensor([2761, 2152, 1853, ..., 412, 2308, 908])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 2288, 261, ..., 2348, 1931, 2726])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 2288, 261, ..., 2348, 1931, 2726])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 2288, 261, ..., 2348, 1931, 2726])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 2288, 261, ..., 2348, 1931, 2726])\n", + "deriv_tensor (40, 81), indices tensor([1539, 303, 2854, ..., 1643, 186, 615])\n", + "deriv_tensor (40, 81), indices tensor([1539, 303, 2854, ..., 1643, 186, 615])\n", + "deriv_tensor (40, 81), indices tensor([1539, 303, 2854, ..., 1643, 186, 615])\n", + "deriv_tensor (40, 81), indices tensor([1539, 303, 2854, ..., 1643, 186, 615])\n", + "deriv_tensor (40, 81), indices tensor([2437, 2325, 941, ..., 1602, 623, 1523])\n", + "deriv_tensor (40, 81), indices tensor([2437, 2325, 941, ..., 1602, 623, 1523])\n", + "deriv_tensor (40, 81), indices tensor([2437, 2325, 941, ..., 1602, 623, 1523])\n", + "deriv_tensor (40, 81), indices tensor([2437, 2325, 941, ..., 1602, 623, 1523])\n", + "deriv_tensor (40, 81), indices 
tensor([2716, 87, 2517, ..., 3076, 2592, 432])\n", + "deriv_tensor (40, 81), indices tensor([2716, 87, 2517, ..., 3076, 2592, 432])\n", + "deriv_tensor (40, 81), indices tensor([2716, 87, 2517, ..., 3076, 2592, 432])\n", + "deriv_tensor (40, 81), indices tensor([2716, 87, 2517, ..., 3076, 2592, 432])\n", + "deriv_tensor (40, 81), indices tensor([2396, 1226, 2563, ..., 64, 515, 1194])\n", + "deriv_tensor (40, 81), indices tensor([2396, 1226, 2563, ..., 64, 515, 1194])\n", + "deriv_tensor (40, 81), indices tensor([2396, 1226, 2563, ..., 64, 515, 1194])\n", + "deriv_tensor (40, 81), indices tensor([2396, 1226, 2563, ..., 64, 515, 1194])\n", + "deriv_tensor (40, 81), indices tensor([2790, 3023, 339, ..., 534, 2082, 2919])\n", + "deriv_tensor (40, 81), indices tensor([2790, 3023, 339, ..., 534, 2082, 2919])\n", + "deriv_tensor (40, 81), indices tensor([2790, 3023, 339, ..., 534, 2082, 2919])\n", + "deriv_tensor (40, 81), indices tensor([2790, 3023, 339, ..., 534, 2082, 2919])\n", + "deriv_tensor (40, 81), indices tensor([2184, 2578, 2617, ..., 1277, 1729, 829])\n", + "deriv_tensor (40, 81), indices tensor([2184, 2578, 2617, ..., 1277, 1729, 829])\n", + "deriv_tensor (40, 81), indices tensor([2184, 2578, 2617, ..., 1277, 1729, 829])\n", + "deriv_tensor (40, 81), indices tensor([2184, 2578, 2617, ..., 1277, 1729, 829])\n", + "deriv_tensor (40, 81), indices tensor([ 498, 2419, 1285, ..., 1895, 330, 136])\n", + "deriv_tensor (40, 81), indices tensor([ 498, 2419, 1285, ..., 1895, 330, 136])\n", + "deriv_tensor (40, 81), indices tensor([ 498, 2419, 1285, ..., 1895, 330, 136])\n", + "deriv_tensor (40, 81), indices tensor([ 498, 2419, 1285, ..., 1895, 330, 136])\n", + "deriv_tensor (40, 81), indices tensor([2884, 1587, 2754, ..., 2433, 2646, 638])\n", + "deriv_tensor (40, 81), indices tensor([2884, 1587, 2754, ..., 2433, 2646, 638])\n", + "deriv_tensor (40, 81), indices tensor([2884, 1587, 2754, ..., 2433, 2646, 638])\n", + "deriv_tensor (40, 81), indices tensor([2884, 1587, 2754, ..., 2433, 2646, 638])\n", + "deriv_tensor (40, 81), indices tensor([ 827, 143, 2011, ..., 1843, 318, 2885])\n", + "deriv_tensor (40, 81), indices tensor([ 827, 143, 2011, ..., 1843, 318, 2885])\n", + "deriv_tensor (40, 81), indices tensor([ 827, 143, 2011, ..., 1843, 318, 2885])\n", + "deriv_tensor (40, 81), indices tensor([ 827, 143, 2011, ..., 1843, 318, 2885])\n", + "deriv_tensor (40, 81), indices tensor([ 397, 2149, 1512, ..., 2028, 2505, 1622])\n", + "deriv_tensor (40, 81), indices tensor([ 397, 2149, 1512, ..., 2028, 2505, 1622])\n", + "deriv_tensor (40, 81), indices tensor([ 397, 2149, 1512, ..., 2028, 2505, 1622])\n", + "deriv_tensor (40, 81), indices tensor([ 397, 2149, 1512, ..., 2028, 2505, 1622])\n", + "deriv_tensor (40, 81), indices tensor([ 600, 2539, 1660, ..., 732, 2178, 3234])\n", + "deriv_tensor (40, 81), indices tensor([ 600, 2539, 1660, ..., 732, 2178, 3234])\n", + "deriv_tensor (40, 81), indices tensor([ 600, 2539, 1660, ..., 732, 2178, 3234])\n", + "deriv_tensor (40, 81), indices tensor([ 600, 2539, 1660, ..., 732, 2178, 3234])\n", + "deriv_tensor (40, 81), indices tensor([2357, 1068, 371, ..., 112, 2020, 1747])\n", + "deriv_tensor (40, 81), indices tensor([2357, 1068, 371, ..., 112, 2020, 1747])\n", + "deriv_tensor (40, 81), indices tensor([2357, 1068, 371, ..., 112, 2020, 1747])\n", + "deriv_tensor (40, 81), indices tensor([2357, 1068, 371, ..., 112, 2020, 1747])\n", + "deriv_tensor (40, 81), indices tensor([1018, 409, 594, ..., 856, 2907, 1155])\n", + "deriv_tensor (40, 81), indices tensor([1018, 
409, 594, ..., 856, 2907, 1155])\n", + "deriv_tensor (40, 81), indices tensor([1018, 409, 594, ..., 856, 2907, 1155])\n", + "deriv_tensor (40, 81), indices tensor([1018, 409, 594, ..., 856, 2907, 1155])\n", + "deriv_tensor (40, 81), indices tensor([ 608, 2073, 396, ..., 3128, 335, 1612])\n", + "deriv_tensor (40, 81), indices tensor([ 608, 2073, 396, ..., 3128, 335, 1612])\n", + "deriv_tensor (40, 81), indices tensor([ 608, 2073, 396, ..., 3128, 335, 1612])\n", + "deriv_tensor (40, 81), indices tensor([ 608, 2073, 396, ..., 3128, 335, 1612])\n", + "deriv_tensor (40, 81), indices tensor([ 277, 784, 1443, ..., 2804, 1465, 345])\n", + "deriv_tensor (40, 81), indices tensor([ 277, 784, 1443, ..., 2804, 1465, 345])\n", + "deriv_tensor (40, 81), indices tensor([ 277, 784, 1443, ..., 2804, 1465, 345])\n", + "deriv_tensor (40, 81), indices tensor([ 277, 784, 1443, ..., 2804, 1465, 345])\n", + "deriv_tensor (40, 81), indices tensor([2527, 2226, 1448, ..., 3229, 1978, 3001])\n", + "deriv_tensor (40, 81), indices tensor([2527, 2226, 1448, ..., 3229, 1978, 3001])\n", + "deriv_tensor (40, 81), indices tensor([2527, 2226, 1448, ..., 3229, 1978, 3001])\n", + "deriv_tensor (40, 81), indices tensor([2527, 2226, 1448, ..., 3229, 1978, 3001])\n", + "deriv_tensor (40, 81), indices tensor([ 663, 2355, 2017, ..., 2776, 3076, 890])\n", + "deriv_tensor (40, 81), indices tensor([ 663, 2355, 2017, ..., 2776, 3076, 890])\n", + "deriv_tensor (40, 81), indices tensor([ 663, 2355, 2017, ..., 2776, 3076, 890])\n", + "deriv_tensor (40, 81), indices tensor([ 663, 2355, 2017, ..., 2776, 3076, 890])\n", + "deriv_tensor (40, 81), indices tensor([1614, 466, 2046, ..., 2025, 513, 1959])\n", + "deriv_tensor (40, 81), indices tensor([1614, 466, 2046, ..., 2025, 513, 1959])\n", + "deriv_tensor (40, 81), indices tensor([1614, 466, 2046, ..., 2025, 513, 1959])\n", + "deriv_tensor (40, 81), indices tensor([1614, 466, 2046, ..., 2025, 513, 1959])\n", + "deriv_tensor (40, 81), indices tensor([1756, 2765, 697, ..., 102, 1327, 817])\n", + "deriv_tensor (40, 81), indices tensor([1756, 2765, 697, ..., 102, 1327, 817])\n", + "deriv_tensor (40, 81), indices tensor([1756, 2765, 697, ..., 102, 1327, 817])\n", + "deriv_tensor (40, 81), indices tensor([1756, 2765, 697, ..., 102, 1327, 817])\n", + "deriv_tensor (40, 81), indices tensor([ 330, 1267, 2765, ..., 2752, 498, 1511])\n", + "deriv_tensor (40, 81), indices tensor([ 330, 1267, 2765, ..., 2752, 498, 1511])\n", + "deriv_tensor (40, 81), indices tensor([ 330, 1267, 2765, ..., 2752, 498, 1511])\n", + "deriv_tensor (40, 81), indices tensor([ 330, 1267, 2765, ..., 2752, 498, 1511])\n", + "deriv_tensor (40, 81), indices tensor([3053, 1193, 2214, ..., 921, 1038, 1000])\n", + "deriv_tensor (40, 81), indices tensor([3053, 1193, 2214, ..., 921, 1038, 1000])\n", + "deriv_tensor (40, 81), indices tensor([3053, 1193, 2214, ..., 921, 1038, 1000])\n", + "deriv_tensor (40, 81), indices tensor([3053, 1193, 2214, ..., 921, 1038, 1000])\n", + "deriv_tensor (40, 81), indices tensor([ 624, 1361, 3071, ..., 1102, 2236, 622])\n", + "deriv_tensor (40, 81), indices tensor([ 624, 1361, 3071, ..., 1102, 2236, 622])\n", + "deriv_tensor (40, 81), indices tensor([ 624, 1361, 3071, ..., 1102, 2236, 622])\n", + "deriv_tensor (40, 81), indices tensor([ 624, 1361, 3071, ..., 1102, 2236, 622])\n", + "deriv_tensor (40, 81), indices tensor([ 18, 3103, 325, ..., 901, 1858, 1058])\n", + "deriv_tensor (40, 81), indices tensor([ 18, 3103, 325, ..., 901, 1858, 1058])\n", + "deriv_tensor (40, 81), indices tensor([ 18, 3103, 325, 
..., 901, 1858, 1058])\n", + "deriv_tensor (40, 81), indices tensor([ 18, 3103, 325, ..., 901, 1858, 1058])\n", + "deriv_tensor (40, 81), indices tensor([2970, 1120, 1571, ..., 765, 2964, 2398])\n", + "deriv_tensor (40, 81), indices tensor([2970, 1120, 1571, ..., 765, 2964, 2398])\n", + "deriv_tensor (40, 81), indices tensor([2970, 1120, 1571, ..., 765, 2964, 2398])\n", + "deriv_tensor (40, 81), indices tensor([2970, 1120, 1571, ..., 765, 2964, 2398])\n", + "deriv_tensor (40, 81), indices tensor([ 148, 2375, 2948, ..., 436, 1884, 2829])\n", + "deriv_tensor (40, 81), indices tensor([ 148, 2375, 2948, ..., 436, 1884, 2829])\n", + "deriv_tensor (40, 81), indices tensor([ 148, 2375, 2948, ..., 436, 1884, 2829])\n", + "deriv_tensor (40, 81), indices tensor([ 148, 2375, 2948, ..., 436, 1884, 2829])\n", + "deriv_tensor (40, 81), indices tensor([2581, 1540, 2366, ..., 3042, 444, 2321])\n", + "deriv_tensor (40, 81), indices tensor([2581, 1540, 2366, ..., 3042, 444, 2321])\n", + "deriv_tensor (40, 81), indices tensor([2581, 1540, 2366, ..., 3042, 444, 2321])\n", + "deriv_tensor (40, 81), indices tensor([2581, 1540, 2366, ..., 3042, 444, 2321])\n", + "deriv_tensor (40, 81), indices tensor([1507, 1876, 3132, ..., 506, 1066, 570])\n", + "deriv_tensor (40, 81), indices tensor([1507, 1876, 3132, ..., 506, 1066, 570])\n", + "deriv_tensor (40, 81), indices tensor([1507, 1876, 3132, ..., 506, 1066, 570])\n", + "deriv_tensor (40, 81), indices tensor([1507, 1876, 3132, ..., 506, 1066, 570])\n", + "deriv_tensor (40, 81), indices tensor([ 525, 1878, 2789, ..., 295, 2085, 1989])\n", + "deriv_tensor (40, 81), indices tensor([ 525, 1878, 2789, ..., 295, 2085, 1989])\n", + "deriv_tensor (40, 81), indices tensor([ 525, 1878, 2789, ..., 295, 2085, 1989])\n", + "deriv_tensor (40, 81), indices tensor([ 525, 1878, 2789, ..., 295, 2085, 1989])\n", + "deriv_tensor (40, 81), indices tensor([ 569, 527, 1313, ..., 1785, 951, 729])\n", + "deriv_tensor (40, 81), indices tensor([ 569, 527, 1313, ..., 1785, 951, 729])\n", + "deriv_tensor (40, 81), indices tensor([ 569, 527, 1313, ..., 1785, 951, 729])\n", + "deriv_tensor (40, 81), indices tensor([ 569, 527, 1313, ..., 1785, 951, 729])\n", + "deriv_tensor (40, 81), indices tensor([1196, 2113, 1462, ..., 2708, 1516, 2032])\n", + "deriv_tensor (40, 81), indices tensor([1196, 2113, 1462, ..., 2708, 1516, 2032])\n", + "deriv_tensor (40, 81), indices tensor([1196, 2113, 1462, ..., 2708, 1516, 2032])\n", + "deriv_tensor (40, 81), indices tensor([1196, 2113, 1462, ..., 2708, 1516, 2032])\n", + "deriv_tensor (40, 81), indices tensor([ 670, 3176, 3004, ..., 2478, 1836, 346])\n", + "deriv_tensor (40, 81), indices tensor([ 670, 3176, 3004, ..., 2478, 1836, 346])\n", + "deriv_tensor (40, 81), indices tensor([ 670, 3176, 3004, ..., 2478, 1836, 346])\n", + "deriv_tensor (40, 81), indices tensor([ 670, 3176, 3004, ..., 2478, 1836, 346])\n", + "deriv_tensor (40, 81), indices tensor([2801, 402, 2664, ..., 2460, 1219, 2891])\n", + "deriv_tensor (40, 81), indices tensor([2801, 402, 2664, ..., 2460, 1219, 2891])\n", + "deriv_tensor (40, 81), indices tensor([2801, 402, 2664, ..., 2460, 1219, 2891])\n", + "deriv_tensor (40, 81), indices tensor([2801, 402, 2664, ..., 2460, 1219, 2891])\n", + "deriv_tensor (40, 81), indices tensor([1784, 809, 748, ..., 1397, 2508, 2807])\n", + "deriv_tensor (40, 81), indices tensor([1784, 809, 748, ..., 1397, 2508, 2807])\n", + "deriv_tensor (40, 81), indices tensor([1784, 809, 748, ..., 1397, 2508, 2807])\n", + "deriv_tensor (40, 81), indices tensor([1784, 809, 748, 
..., 1397, 2508, 2807])\n", + "deriv_tensor (40, 81), indices tensor([1476, 1892, 1908, ..., 2247, 2200, 225])\n", + "deriv_tensor (40, 81), indices tensor([1476, 1892, 1908, ..., 2247, 2200, 225])\n", + "deriv_tensor (40, 81), indices tensor([1476, 1892, 1908, ..., 2247, 2200, 225])\n", + "deriv_tensor (40, 81), indices tensor([1476, 1892, 1908, ..., 2247, 2200, 225])\n", + "deriv_tensor (40, 81), indices tensor([ 741, 2858, 1320, ..., 1019, 2163, 816])\n", + "deriv_tensor (40, 81), indices tensor([ 741, 2858, 1320, ..., 1019, 2163, 816])\n", + "deriv_tensor (40, 81), indices tensor([ 741, 2858, 1320, ..., 1019, 2163, 816])\n", + "deriv_tensor (40, 81), indices tensor([ 741, 2858, 1320, ..., 1019, 2163, 816])\n", + "deriv_tensor (40, 81), indices tensor([2933, 2938, 1516, ..., 1695, 2123, 2899])\n", + "deriv_tensor (40, 81), indices tensor([2933, 2938, 1516, ..., 1695, 2123, 2899])\n", + "deriv_tensor (40, 81), indices tensor([2933, 2938, 1516, ..., 1695, 2123, 2899])\n", + "deriv_tensor (40, 81), indices tensor([2933, 2938, 1516, ..., 1695, 2123, 2899])\n", + "deriv_tensor (40, 81), indices tensor([2630, 1412, 1270, ..., 2998, 2728, 1468])\n", + "deriv_tensor (40, 81), indices tensor([2630, 1412, 1270, ..., 2998, 2728, 1468])\n", + "deriv_tensor (40, 81), indices tensor([2630, 1412, 1270, ..., 2998, 2728, 1468])\n", + "deriv_tensor (40, 81), indices tensor([2630, 1412, 1270, ..., 2998, 2728, 1468])\n", + "deriv_tensor (40, 81), indices tensor([ 636, 162, 1607, ..., 36, 108, 2005])\n", + "deriv_tensor (40, 81), indices tensor([ 636, 162, 1607, ..., 36, 108, 2005])\n", + "deriv_tensor (40, 81), indices tensor([ 636, 162, 1607, ..., 36, 108, 2005])\n", + "deriv_tensor (40, 81), indices tensor([ 636, 162, 1607, ..., 36, 108, 2005])\n", + "deriv_tensor (40, 81), indices tensor([3065, 1613, 2637, ..., 3232, 2449, 467])\n", + "deriv_tensor (40, 81), indices tensor([3065, 1613, 2637, ..., 3232, 2449, 467])\n", + "deriv_tensor (40, 81), indices tensor([3065, 1613, 2637, ..., 3232, 2449, 467])\n", + "deriv_tensor (40, 81), indices tensor([3065, 1613, 2637, ..., 3232, 2449, 467])\n", + "deriv_tensor (40, 81), indices tensor([1883, 377, 2021, ..., 2889, 2423, 1386])\n", + "deriv_tensor (40, 81), indices tensor([1883, 377, 2021, ..., 2889, 2423, 1386])\n", + "deriv_tensor (40, 81), indices tensor([1883, 377, 2021, ..., 2889, 2423, 1386])\n", + "deriv_tensor (40, 81), indices tensor([1883, 377, 2021, ..., 2889, 2423, 1386])\n", + "deriv_tensor (40, 81), indices tensor([ 805, 1611, 869, ..., 2857, 919, 338])\n", + "deriv_tensor (40, 81), indices tensor([ 805, 1611, 869, ..., 2857, 919, 338])\n", + "deriv_tensor (40, 81), indices tensor([ 805, 1611, 869, ..., 2857, 919, 338])\n", + "deriv_tensor (40, 81), indices tensor([ 805, 1611, 869, ..., 2857, 919, 338])\n", + "deriv_tensor (40, 81), indices tensor([ 235, 318, 245, ..., 2157, 540, 281])\n", + "deriv_tensor (40, 81), indices tensor([ 235, 318, 245, ..., 2157, 540, 281])\n", + "deriv_tensor (40, 81), indices tensor([ 235, 318, 245, ..., 2157, 540, 281])\n", + "deriv_tensor (40, 81), indices tensor([ 235, 318, 245, ..., 2157, 540, 281])\n", + "deriv_tensor (40, 81), indices tensor([2795, 908, 2364, ..., 1808, 1894, 390])\n", + "deriv_tensor (40, 81), indices tensor([2795, 908, 2364, ..., 1808, 1894, 390])\n", + "deriv_tensor (40, 81), indices tensor([2795, 908, 2364, ..., 1808, 1894, 390])\n", + "deriv_tensor (40, 81), indices tensor([2795, 908, 2364, ..., 1808, 1894, 390])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 1138, 2134, ..., 1305, 
2762, 899])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 1138, 2134, ..., 1305, 2762, 899])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 1138, 2134, ..., 1305, 2762, 899])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 1138, 2134, ..., 1305, 2762, 899])\n", + "deriv_tensor (40, 81), indices tensor([ 783, 36, 184, ..., 1309, 247, 1558])\n", + "deriv_tensor (40, 81), indices tensor([ 783, 36, 184, ..., 1309, 247, 1558])\n", + "deriv_tensor (40, 81), indices tensor([ 783, 36, 184, ..., 1309, 247, 1558])\n", + "deriv_tensor (40, 81), indices tensor([ 783, 36, 184, ..., 1309, 247, 1558])\n", + "deriv_tensor (40, 81), indices tensor([ 640, 2884, 3165, ..., 2561, 2024, 342])\n", + "deriv_tensor (40, 81), indices tensor([ 640, 2884, 3165, ..., 2561, 2024, 342])\n", + "deriv_tensor (40, 81), indices tensor([ 640, 2884, 3165, ..., 2561, 2024, 342])\n", + "deriv_tensor (40, 81), indices tensor([ 640, 2884, 3165, ..., 2561, 2024, 342])\n", + "deriv_tensor (40, 81), indices tensor([2361, 1695, 89, ..., 2225, 1964, 787])\n", + "deriv_tensor (40, 81), indices tensor([2361, 1695, 89, ..., 2225, 1964, 787])\n", + "deriv_tensor (40, 81), indices tensor([2361, 1695, 89, ..., 2225, 1964, 787])\n", + "deriv_tensor (40, 81), indices tensor([2361, 1695, 89, ..., 2225, 1964, 787])\n", + "deriv_tensor (40, 81), indices tensor([ 657, 2593, 1847, ..., 1429, 2891, 1863])\n", + "deriv_tensor (40, 81), indices tensor([ 657, 2593, 1847, ..., 1429, 2891, 1863])\n", + "deriv_tensor (40, 81), indices tensor([ 657, 2593, 1847, ..., 1429, 2891, 1863])\n", + "deriv_tensor (40, 81), indices tensor([ 657, 2593, 1847, ..., 1429, 2891, 1863])\n", + "deriv_tensor (40, 81), indices tensor([1829, 1966, 2819, ..., 2439, 2785, 1491])\n", + "deriv_tensor (40, 81), indices tensor([1829, 1966, 2819, ..., 2439, 2785, 1491])\n", + "deriv_tensor (40, 81), indices tensor([1829, 1966, 2819, ..., 2439, 2785, 1491])\n", + "deriv_tensor (40, 81), indices tensor([1829, 1966, 2819, ..., 2439, 2785, 1491])\n", + "deriv_tensor (40, 81), indices tensor([2913, 907, 3134, ..., 711, 3075, 74])\n", + "deriv_tensor (40, 81), indices tensor([2913, 907, 3134, ..., 711, 3075, 74])\n", + "deriv_tensor (40, 81), indices tensor([2913, 907, 3134, ..., 711, 3075, 74])\n", + "deriv_tensor (40, 81), indices tensor([2913, 907, 3134, ..., 711, 3075, 74])\n", + "deriv_tensor (40, 81), indices tensor([ 793, 701, 1482, ..., 2408, 870, 2894])\n", + "deriv_tensor (40, 81), indices tensor([ 793, 701, 1482, ..., 2408, 870, 2894])\n", + "deriv_tensor (40, 81), indices tensor([ 793, 701, 1482, ..., 2408, 870, 2894])\n", + "deriv_tensor (40, 81), indices tensor([ 793, 701, 1482, ..., 2408, 870, 2894])\n", + "deriv_tensor (40, 81), indices tensor([1044, 3198, 86, ..., 1998, 1457, 792])\n", + "deriv_tensor (40, 81), indices tensor([1044, 3198, 86, ..., 1998, 1457, 792])\n", + "deriv_tensor (40, 81), indices tensor([1044, 3198, 86, ..., 1998, 1457, 792])\n", + "deriv_tensor (40, 81), indices tensor([1044, 3198, 86, ..., 1998, 1457, 792])\n", + "deriv_tensor (40, 81), indices tensor([2590, 249, 900, ..., 118, 1032, 924])\n", + "deriv_tensor (40, 81), indices tensor([2590, 249, 900, ..., 118, 1032, 924])\n", + "deriv_tensor (40, 81), indices tensor([2590, 249, 900, ..., 118, 1032, 924])\n", + "deriv_tensor (40, 81), indices tensor([2590, 249, 900, ..., 118, 1032, 924])\n", + "deriv_tensor (40, 81), indices tensor([2839, 2847, 1612, ..., 1663, 1305, 3082])\n", + "deriv_tensor (40, 81), indices tensor([2839, 2847, 1612, ..., 1663, 1305, 3082])\n", + "deriv_tensor 
(40, 81), indices tensor([2839, 2847, 1612, ..., 1663, 1305, 3082])\n", + "deriv_tensor (40, 81), indices tensor([2839, 2847, 1612, ..., 1663, 1305, 3082])\n", + "deriv_tensor (40, 81), indices tensor([1726, 2694, 2898, ..., 674, 1279, 1173])\n", + "deriv_tensor (40, 81), indices tensor([1726, 2694, 2898, ..., 674, 1279, 1173])\n", + "deriv_tensor (40, 81), indices tensor([1726, 2694, 2898, ..., 674, 1279, 1173])\n", + "deriv_tensor (40, 81), indices tensor([1726, 2694, 2898, ..., 674, 1279, 1173])\n", + "deriv_tensor (40, 81), indices tensor([2314, 1021, 141, ..., 1446, 572, 2955])\n", + "deriv_tensor (40, 81), indices tensor([2314, 1021, 141, ..., 1446, 572, 2955])\n", + "deriv_tensor (40, 81), indices tensor([2314, 1021, 141, ..., 1446, 572, 2955])\n", + "deriv_tensor (40, 81), indices tensor([2314, 1021, 141, ..., 1446, 572, 2955])\n", + "deriv_tensor (40, 81), indices tensor([2430, 3159, 1754, ..., 1843, 1948, 553])\n", + "deriv_tensor (40, 81), indices tensor([2430, 3159, 1754, ..., 1843, 1948, 553])\n", + "deriv_tensor (40, 81), indices tensor([2430, 3159, 1754, ..., 1843, 1948, 553])\n", + "deriv_tensor (40, 81), indices tensor([2430, 3159, 1754, ..., 1843, 1948, 553])\n", + "deriv_tensor (40, 81), indices tensor([2552, 2303, 1593, ..., 2674, 2197, 112])\n", + "deriv_tensor (40, 81), indices tensor([2552, 2303, 1593, ..., 2674, 2197, 112])\n", + "deriv_tensor (40, 81), indices tensor([2552, 2303, 1593, ..., 2674, 2197, 112])\n", + "deriv_tensor (40, 81), indices tensor([2552, 2303, 1593, ..., 2674, 2197, 112])\n", + "deriv_tensor (40, 81), indices tensor([2868, 2448, 812, ..., 1166, 2711, 1416])\n", + "deriv_tensor (40, 81), indices tensor([2868, 2448, 812, ..., 1166, 2711, 1416])\n", + "deriv_tensor (40, 81), indices tensor([2868, 2448, 812, ..., 1166, 2711, 1416])\n", + "deriv_tensor (40, 81), indices tensor([2868, 2448, 812, ..., 1166, 2711, 1416])\n", + "deriv_tensor (40, 81), indices tensor([2270, 3168, 660, ..., 2885, 2067, 2676])\n", + "deriv_tensor (40, 81), indices tensor([2270, 3168, 660, ..., 2885, 2067, 2676])\n", + "deriv_tensor (40, 81), indices tensor([2270, 3168, 660, ..., 2885, 2067, 2676])\n", + "deriv_tensor (40, 81), indices tensor([2270, 3168, 660, ..., 2885, 2067, 2676])\n", + "deriv_tensor (40, 81), indices tensor([1290, 509, 1004, ..., 2852, 2895, 3146])\n", + "deriv_tensor (40, 81), indices tensor([1290, 509, 1004, ..., 2852, 2895, 3146])\n", + "deriv_tensor (40, 81), indices tensor([1290, 509, 1004, ..., 2852, 2895, 3146])\n", + "deriv_tensor (40, 81), indices tensor([1290, 509, 1004, ..., 2852, 2895, 3146])\n", + "deriv_tensor (40, 81), indices tensor([ 53, 1223, 664, ..., 2350, 2372, 2906])\n", + "deriv_tensor (40, 81), indices tensor([ 53, 1223, 664, ..., 2350, 2372, 2906])\n", + "deriv_tensor (40, 81), indices tensor([ 53, 1223, 664, ..., 2350, 2372, 2906])\n", + "deriv_tensor (40, 81), indices tensor([ 53, 1223, 664, ..., 2350, 2372, 2906])\n", + "deriv_tensor (40, 81), indices tensor([2118, 2375, 3220, ..., 793, 1648, 3221])\n", + "deriv_tensor (40, 81), indices tensor([2118, 2375, 3220, ..., 793, 1648, 3221])\n", + "deriv_tensor (40, 81), indices tensor([2118, 2375, 3220, ..., 793, 1648, 3221])\n", + "deriv_tensor (40, 81), indices tensor([2118, 2375, 3220, ..., 793, 1648, 3221])\n", + "deriv_tensor (40, 81), indices tensor([ 769, 1704, 2933, ..., 3058, 1274, 3050])\n", + "deriv_tensor (40, 81), indices tensor([ 769, 1704, 2933, ..., 3058, 1274, 3050])\n", + "deriv_tensor (40, 81), indices tensor([ 769, 1704, 2933, ..., 3058, 1274, 3050])\n", + 
"deriv_tensor (40, 81), indices tensor([ 769, 1704, 2933, ..., 3058, 1274, 3050])\n", + "deriv_tensor (40, 81), indices tensor([1732, 3154, 3029, ..., 2378, 2586, 1924])\n", + "deriv_tensor (40, 81), indices tensor([1732, 3154, 3029, ..., 2378, 2586, 1924])\n", + "deriv_tensor (40, 81), indices tensor([1732, 3154, 3029, ..., 2378, 2586, 1924])\n", + "deriv_tensor (40, 81), indices tensor([1732, 3154, 3029, ..., 2378, 2586, 1924])\n", + "deriv_tensor (40, 81), indices tensor([2327, 1740, 3034, ..., 1016, 557, 2156])\n", + "deriv_tensor (40, 81), indices tensor([2327, 1740, 3034, ..., 1016, 557, 2156])\n", + "deriv_tensor (40, 81), indices tensor([2327, 1740, 3034, ..., 1016, 557, 2156])\n", + "deriv_tensor (40, 81), indices tensor([2327, 1740, 3034, ..., 1016, 557, 2156])\n", + "deriv_tensor (40, 81), indices tensor([1295, 2645, 1066, ..., 951, 1442, 3000])\n", + "deriv_tensor (40, 81), indices tensor([1295, 2645, 1066, ..., 951, 1442, 3000])\n", + "deriv_tensor (40, 81), indices tensor([1295, 2645, 1066, ..., 951, 1442, 3000])\n", + "deriv_tensor (40, 81), indices tensor([1295, 2645, 1066, ..., 951, 1442, 3000])\n", + "deriv_tensor (40, 81), indices tensor([ 904, 2038, 2762, ..., 801, 2885, 1865])\n", + "deriv_tensor (40, 81), indices tensor([ 904, 2038, 2762, ..., 801, 2885, 1865])\n", + "deriv_tensor (40, 81), indices tensor([ 904, 2038, 2762, ..., 801, 2885, 1865])\n", + "deriv_tensor (40, 81), indices tensor([ 904, 2038, 2762, ..., 801, 2885, 1865])\n", + "deriv_tensor (40, 81), indices tensor([ 484, 2613, 1081, ..., 388, 1415, 1903])\n", + "deriv_tensor (40, 81), indices tensor([ 484, 2613, 1081, ..., 388, 1415, 1903])\n", + "deriv_tensor (40, 81), indices tensor([ 484, 2613, 1081, ..., 388, 1415, 1903])\n", + "deriv_tensor (40, 81), indices tensor([ 484, 2613, 1081, ..., 388, 1415, 1903])\n", + "deriv_tensor (40, 81), indices tensor([ 35, 2856, 1708, ..., 2381, 3196, 2954])\n", + "deriv_tensor (40, 81), indices tensor([ 35, 2856, 1708, ..., 2381, 3196, 2954])\n", + "deriv_tensor (40, 81), indices tensor([ 35, 2856, 1708, ..., 2381, 3196, 2954])\n", + "deriv_tensor (40, 81), indices tensor([ 35, 2856, 1708, ..., 2381, 3196, 2954])\n", + "deriv_tensor (40, 81), indices tensor([ 472, 2800, 2094, ..., 1830, 921, 1473])\n", + "deriv_tensor (40, 81), indices tensor([ 472, 2800, 2094, ..., 1830, 921, 1473])\n", + "deriv_tensor (40, 81), indices tensor([ 472, 2800, 2094, ..., 1830, 921, 1473])\n", + "deriv_tensor (40, 81), indices tensor([ 472, 2800, 2094, ..., 1830, 921, 1473])\n", + "deriv_tensor (40, 81), indices tensor([2894, 2983, 1182, ..., 1423, 1463, 2879])\n", + "deriv_tensor (40, 81), indices tensor([2894, 2983, 1182, ..., 1423, 1463, 2879])\n", + "deriv_tensor (40, 81), indices tensor([2894, 2983, 1182, ..., 1423, 1463, 2879])\n", + "deriv_tensor (40, 81), indices tensor([2894, 2983, 1182, ..., 1423, 1463, 2879])\n", + "deriv_tensor (40, 81), indices tensor([ 770, 2497, 1158, ..., 2071, 858, 3153])\n", + "deriv_tensor (40, 81), indices tensor([ 770, 2497, 1158, ..., 2071, 858, 3153])\n", + "deriv_tensor (40, 81), indices tensor([ 770, 2497, 1158, ..., 2071, 858, 3153])\n", + "deriv_tensor (40, 81), indices tensor([ 770, 2497, 1158, ..., 2071, 858, 3153])\n", + "deriv_tensor (40, 81), indices tensor([2213, 2562, 2035, ..., 1599, 159, 940])\n", + "deriv_tensor (40, 81), indices tensor([2213, 2562, 2035, ..., 1599, 159, 940])\n", + "deriv_tensor (40, 81), indices tensor([2213, 2562, 2035, ..., 1599, 159, 940])\n", + "deriv_tensor (40, 81), indices tensor([2213, 2562, 2035, ..., 1599, 
159, 940])\n", + "deriv_tensor (40, 81), indices tensor([ 947, 2285, 1537, ..., 2837, 3064, 674])\n", + "deriv_tensor (40, 81), indices tensor([ 947, 2285, 1537, ..., 2837, 3064, 674])\n", + "deriv_tensor (40, 81), indices tensor([ 947, 2285, 1537, ..., 2837, 3064, 674])\n", + "deriv_tensor (40, 81), indices tensor([ 947, 2285, 1537, ..., 2837, 3064, 674])\n", + "deriv_tensor (40, 81), indices tensor([1975, 719, 3014, ..., 999, 2627, 2104])\n", + "deriv_tensor (40, 81), indices tensor([1975, 719, 3014, ..., 999, 2627, 2104])\n", + "deriv_tensor (40, 81), indices tensor([1975, 719, 3014, ..., 999, 2627, 2104])\n", + "deriv_tensor (40, 81), indices tensor([1975, 719, 3014, ..., 999, 2627, 2104])\n", + "deriv_tensor (40, 81), indices tensor([1506, 2919, 969, ..., 2202, 131, 2503])\n", + "deriv_tensor (40, 81), indices tensor([1506, 2919, 969, ..., 2202, 131, 2503])\n", + "deriv_tensor (40, 81), indices tensor([1506, 2919, 969, ..., 2202, 131, 2503])\n", + "deriv_tensor (40, 81), indices tensor([1506, 2919, 969, ..., 2202, 131, 2503])\n", + "deriv_tensor (40, 81), indices tensor([2870, 2799, 2639, ..., 121, 471, 457])\n", + "deriv_tensor (40, 81), indices tensor([2870, 2799, 2639, ..., 121, 471, 457])\n", + "deriv_tensor (40, 81), indices tensor([2870, 2799, 2639, ..., 121, 471, 457])\n", + "deriv_tensor (40, 81), indices tensor([2870, 2799, 2639, ..., 121, 471, 457])\n", + "deriv_tensor (40, 81), indices tensor([1770, 43, 321, ..., 878, 1054, 1527])\n", + "deriv_tensor (40, 81), indices tensor([1770, 43, 321, ..., 878, 1054, 1527])\n", + "deriv_tensor (40, 81), indices tensor([1770, 43, 321, ..., 878, 1054, 1527])\n", + "deriv_tensor (40, 81), indices tensor([1770, 43, 321, ..., 878, 1054, 1527])\n", + "deriv_tensor (40, 81), indices tensor([ 576, 281, 1832, ..., 2756, 1925, 1312])\n", + "deriv_tensor (40, 81), indices tensor([ 576, 281, 1832, ..., 2756, 1925, 1312])\n", + "deriv_tensor (40, 81), indices tensor([ 576, 281, 1832, ..., 2756, 1925, 1312])\n", + "deriv_tensor (40, 81), indices tensor([ 576, 281, 1832, ..., 2756, 1925, 1312])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2108, 7, ..., 800, 698, 2427])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2108, 7, ..., 800, 698, 2427])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2108, 7, ..., 800, 698, 2427])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2108, 7, ..., 800, 698, 2427])\n", + "deriv_tensor (40, 81), indices tensor([1028, 1827, 691, ..., 352, 1737, 70])\n", + "deriv_tensor (40, 81), indices tensor([1028, 1827, 691, ..., 352, 1737, 70])\n", + "deriv_tensor (40, 81), indices tensor([1028, 1827, 691, ..., 352, 1737, 70])\n", + "deriv_tensor (40, 81), indices tensor([1028, 1827, 691, ..., 352, 1737, 70])\n", + "deriv_tensor (40, 81), indices tensor([1386, 2043, 2874, ..., 1851, 1442, 1857])\n", + "deriv_tensor (40, 81), indices tensor([1386, 2043, 2874, ..., 1851, 1442, 1857])\n", + "deriv_tensor (40, 81), indices tensor([1386, 2043, 2874, ..., 1851, 1442, 1857])\n", + "deriv_tensor (40, 81), indices tensor([1386, 2043, 2874, ..., 1851, 1442, 1857])\n", + "deriv_tensor (40, 81), indices tensor([2317, 1074, 1029, ..., 3027, 1739, 272])\n", + "deriv_tensor (40, 81), indices tensor([2317, 1074, 1029, ..., 3027, 1739, 272])\n", + "deriv_tensor (40, 81), indices tensor([2317, 1074, 1029, ..., 3027, 1739, 272])\n", + "deriv_tensor (40, 81), indices tensor([2317, 1074, 1029, ..., 3027, 1739, 272])\n", + "deriv_tensor (40, 81), indices tensor([ 416, 1208, 887, ..., 1904, 884, 639])\n", + "deriv_tensor (40, 81), 
indices tensor([ 416, 1208, 887, ..., 1904, 884, 639])\n", + "deriv_tensor (40, 81), indices tensor([ 416, 1208, 887, ..., 1904, 884, 639])\n", + "deriv_tensor (40, 81), indices tensor([ 416, 1208, 887, ..., 1904, 884, 639])\n", + "deriv_tensor (40, 81), indices tensor([ 556, 1514, 297, ..., 3105, 636, 3139])\n", + "deriv_tensor (40, 81), indices tensor([ 556, 1514, 297, ..., 3105, 636, 3139])\n", + "deriv_tensor (40, 81), indices tensor([ 556, 1514, 297, ..., 3105, 636, 3139])\n", + "deriv_tensor (40, 81), indices tensor([ 556, 1514, 297, ..., 3105, 636, 3139])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 2041, 92, ..., 1109, 18, 3169])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 2041, 92, ..., 1109, 18, 3169])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 2041, 92, ..., 1109, 18, 3169])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 2041, 92, ..., 1109, 18, 3169])\n", + "deriv_tensor (40, 81), indices tensor([2558, 1524, 1745, ..., 2882, 2588, 1105])\n", + "deriv_tensor (40, 81), indices tensor([2558, 1524, 1745, ..., 2882, 2588, 1105])\n", + "deriv_tensor (40, 81), indices tensor([2558, 1524, 1745, ..., 2882, 2588, 1105])\n", + "deriv_tensor (40, 81), indices tensor([2558, 1524, 1745, ..., 2882, 2588, 1105])\n", + "deriv_tensor (40, 81), indices tensor([3142, 2092, 2337, ..., 2680, 873, 462])\n", + "deriv_tensor (40, 81), indices tensor([3142, 2092, 2337, ..., 2680, 873, 462])\n", + "deriv_tensor (40, 81), indices tensor([3142, 2092, 2337, ..., 2680, 873, 462])\n", + "deriv_tensor (40, 81), indices tensor([3142, 2092, 2337, ..., 2680, 873, 462])\n", + "deriv_tensor (40, 81), indices tensor([1703, 2852, 593, ..., 1028, 895, 3019])\n", + "deriv_tensor (40, 81), indices tensor([1703, 2852, 593, ..., 1028, 895, 3019])\n", + "deriv_tensor (40, 81), indices tensor([1703, 2852, 593, ..., 1028, 895, 3019])\n", + "deriv_tensor (40, 81), indices tensor([1703, 2852, 593, ..., 1028, 895, 3019])\n", + "deriv_tensor (40, 81), indices tensor([ 313, 1948, 909, ..., 2412, 2686, 492])\n", + "deriv_tensor (40, 81), indices tensor([ 313, 1948, 909, ..., 2412, 2686, 492])\n", + "deriv_tensor (40, 81), indices tensor([ 313, 1948, 909, ..., 2412, 2686, 492])\n", + "deriv_tensor (40, 81), indices tensor([ 313, 1948, 909, ..., 2412, 2686, 492])\n", + "deriv_tensor (40, 81), indices tensor([3025, 2792, 3045, ..., 1460, 1668, 364])\n", + "deriv_tensor (40, 81), indices tensor([3025, 2792, 3045, ..., 1460, 1668, 364])\n", + "deriv_tensor (40, 81), indices tensor([3025, 2792, 3045, ..., 1460, 1668, 364])\n", + "deriv_tensor (40, 81), indices tensor([3025, 2792, 3045, ..., 1460, 1668, 364])\n", + "deriv_tensor (40, 81), indices tensor([ 913, 2842, 719, ..., 2735, 289, 4])\n", + "deriv_tensor (40, 81), indices tensor([ 913, 2842, 719, ..., 2735, 289, 4])\n", + "deriv_tensor (40, 81), indices tensor([ 913, 2842, 719, ..., 2735, 289, 4])\n", + "deriv_tensor (40, 81), indices tensor([ 913, 2842, 719, ..., 2735, 289, 4])\n", + "deriv_tensor (40, 81), indices tensor([1505, 1049, 980, ..., 606, 2156, 1438])\n", + "deriv_tensor (40, 81), indices tensor([1505, 1049, 980, ..., 606, 2156, 1438])\n", + "deriv_tensor (40, 81), indices tensor([1505, 1049, 980, ..., 606, 2156, 1438])\n", + "deriv_tensor (40, 81), indices tensor([1505, 1049, 980, ..., 606, 2156, 1438])\n", + "deriv_tensor (40, 81), indices tensor([3191, 2414, 639, ..., 2046, 2867, 2532])\n", + "deriv_tensor (40, 81), indices tensor([3191, 2414, 639, ..., 2046, 2867, 2532])\n", + "deriv_tensor (40, 81), indices tensor([3191, 2414, 639, ..., 
2046, 2867, 2532])\n", + "deriv_tensor (40, 81), indices tensor([3191, 2414, 639, ..., 2046, 2867, 2532])\n", + "deriv_tensor (40, 81), indices tensor([2412, 2341, 1230, ..., 205, 2988, 1195])\n", + "deriv_tensor (40, 81), indices tensor([2412, 2341, 1230, ..., 205, 2988, 1195])\n", + "deriv_tensor (40, 81), indices tensor([2412, 2341, 1230, ..., 205, 2988, 1195])\n", + "deriv_tensor (40, 81), indices tensor([2412, 2341, 1230, ..., 205, 2988, 1195])\n", + "deriv_tensor (40, 81), indices tensor([1375, 2891, 73, ..., 1425, 1929, 1812])\n", + "deriv_tensor (40, 81), indices tensor([1375, 2891, 73, ..., 1425, 1929, 1812])\n", + "deriv_tensor (40, 81), indices tensor([1375, 2891, 73, ..., 1425, 1929, 1812])\n", + "deriv_tensor (40, 81), indices tensor([1375, 2891, 73, ..., 1425, 1929, 1812])\n", + "deriv_tensor (40, 81), indices tensor([2008, 1349, 2018, ..., 1881, 3128, 1765])\n", + "deriv_tensor (40, 81), indices tensor([2008, 1349, 2018, ..., 1881, 3128, 1765])\n", + "deriv_tensor (40, 81), indices tensor([2008, 1349, 2018, ..., 1881, 3128, 1765])\n", + "deriv_tensor (40, 81), indices tensor([2008, 1349, 2018, ..., 1881, 3128, 1765])\n", + "deriv_tensor (40, 81), indices tensor([ 810, 1442, 229, ..., 415, 885, 2710])\n", + "deriv_tensor (40, 81), indices tensor([ 810, 1442, 229, ..., 415, 885, 2710])\n", + "deriv_tensor (40, 81), indices tensor([ 810, 1442, 229, ..., 415, 885, 2710])\n", + "deriv_tensor (40, 81), indices tensor([ 810, 1442, 229, ..., 415, 885, 2710])\n", + "deriv_tensor (40, 81), indices tensor([ 606, 2915, 1969, ..., 760, 1380, 2481])\n", + "deriv_tensor (40, 81), indices tensor([ 606, 2915, 1969, ..., 760, 1380, 2481])\n", + "deriv_tensor (40, 81), indices tensor([ 606, 2915, 1969, ..., 760, 1380, 2481])\n", + "deriv_tensor (40, 81), indices tensor([ 606, 2915, 1969, ..., 760, 1380, 2481])\n", + "deriv_tensor (40, 81), indices tensor([2632, 1339, 41, ..., 1657, 1821, 3114])\n", + "deriv_tensor (40, 81), indices tensor([2632, 1339, 41, ..., 1657, 1821, 3114])\n", + "deriv_tensor (40, 81), indices tensor([2632, 1339, 41, ..., 1657, 1821, 3114])\n", + "deriv_tensor (40, 81), indices tensor([2632, 1339, 41, ..., 1657, 1821, 3114])\n", + "deriv_tensor (40, 81), indices tensor([3012, 2565, 227, ..., 2961, 1321, 3054])\n", + "deriv_tensor (40, 81), indices tensor([3012, 2565, 227, ..., 2961, 1321, 3054])\n", + "deriv_tensor (40, 81), indices tensor([3012, 2565, 227, ..., 2961, 1321, 3054])\n", + "deriv_tensor (40, 81), indices tensor([3012, 2565, 227, ..., 2961, 1321, 3054])\n", + "deriv_tensor (40, 81), indices tensor([ 182, 1662, 2957, ..., 235, 2718, 322])\n", + "deriv_tensor (40, 81), indices tensor([ 182, 1662, 2957, ..., 235, 2718, 322])\n", + "deriv_tensor (40, 81), indices tensor([ 182, 1662, 2957, ..., 235, 2718, 322])\n", + "deriv_tensor (40, 81), indices tensor([ 182, 1662, 2957, ..., 235, 2718, 322])\n", + "deriv_tensor (40, 81), indices tensor([2494, 2393, 2072, ..., 2739, 788, 1125])\n", + "deriv_tensor (40, 81), indices tensor([2494, 2393, 2072, ..., 2739, 788, 1125])\n", + "deriv_tensor (40, 81), indices tensor([2494, 2393, 2072, ..., 2739, 788, 1125])\n", + "deriv_tensor (40, 81), indices tensor([2494, 2393, 2072, ..., 2739, 788, 1125])\n", + "deriv_tensor (40, 81), indices tensor([2417, 1705, 2296, ..., 2294, 2056, 219])\n", + "deriv_tensor (40, 81), indices tensor([2417, 1705, 2296, ..., 2294, 2056, 219])\n", + "deriv_tensor (40, 81), indices tensor([2417, 1705, 2296, ..., 2294, 2056, 219])\n", + "deriv_tensor (40, 81), indices tensor([2417, 1705, 2296, ..., 
2294, 2056, 219])\n", + "deriv_tensor (40, 81), indices tensor([1629, 2760, 100, ..., 2035, 920, 1526])\n", + "deriv_tensor (40, 81), indices tensor([1629, 2760, 100, ..., 2035, 920, 1526])\n", + "deriv_tensor (40, 81), indices tensor([1629, 2760, 100, ..., 2035, 920, 1526])\n", + "deriv_tensor (40, 81), indices tensor([1629, 2760, 100, ..., 2035, 920, 1526])\n", + "deriv_tensor (40, 81), indices tensor([ 508, 1890, 1832, ..., 1991, 2618, 1448])\n", + "deriv_tensor (40, 81), indices tensor([ 508, 1890, 1832, ..., 1991, 2618, 1448])\n", + "deriv_tensor (40, 81), indices tensor([ 508, 1890, 1832, ..., 1991, 2618, 1448])\n", + "deriv_tensor (40, 81), indices tensor([ 508, 1890, 1832, ..., 1991, 2618, 1448])\n", + "deriv_tensor (40, 81), indices tensor([3105, 3072, 2189, ..., 1901, 2299, 1128])\n", + "deriv_tensor (40, 81), indices tensor([3105, 3072, 2189, ..., 1901, 2299, 1128])\n", + "deriv_tensor (40, 81), indices tensor([3105, 3072, 2189, ..., 1901, 2299, 1128])\n", + "deriv_tensor (40, 81), indices tensor([3105, 3072, 2189, ..., 1901, 2299, 1128])\n", + "deriv_tensor (40, 81), indices tensor([1773, 840, 3052, ..., 464, 758, 644])\n", + "deriv_tensor (40, 81), indices tensor([1773, 840, 3052, ..., 464, 758, 644])\n", + "deriv_tensor (40, 81), indices tensor([1773, 840, 3052, ..., 464, 758, 644])\n", + "deriv_tensor (40, 81), indices tensor([1773, 840, 3052, ..., 464, 758, 644])\n", + "deriv_tensor (40, 81), indices tensor([ 967, 3201, 319, ..., 2083, 1030, 366])\n", + "deriv_tensor (40, 81), indices tensor([ 967, 3201, 319, ..., 2083, 1030, 366])\n", + "deriv_tensor (40, 81), indices tensor([ 967, 3201, 319, ..., 2083, 1030, 366])\n", + "deriv_tensor (40, 81), indices tensor([ 967, 3201, 319, ..., 2083, 1030, 366])\n", + "deriv_tensor (40, 81), indices tensor([ 906, 2893, 1172, ..., 3066, 1542, 188])\n", + "deriv_tensor (40, 81), indices tensor([ 906, 2893, 1172, ..., 3066, 1542, 188])\n", + "deriv_tensor (40, 81), indices tensor([ 906, 2893, 1172, ..., 3066, 1542, 188])\n", + "deriv_tensor (40, 81), indices tensor([ 906, 2893, 1172, ..., 3066, 1542, 188])\n", + "deriv_tensor (40, 81), indices tensor([2001, 788, 248, ..., 165, 2942, 293])\n", + "deriv_tensor (40, 81), indices tensor([2001, 788, 248, ..., 165, 2942, 293])\n", + "deriv_tensor (40, 81), indices tensor([2001, 788, 248, ..., 165, 2942, 293])\n", + "deriv_tensor (40, 81), indices tensor([2001, 788, 248, ..., 165, 2942, 293])\n", + "deriv_tensor (40, 81), indices tensor([ 61, 2908, 2169, ..., 1412, 486, 2797])\n", + "deriv_tensor (40, 81), indices tensor([ 61, 2908, 2169, ..., 1412, 486, 2797])\n", + "deriv_tensor (40, 81), indices tensor([ 61, 2908, 2169, ..., 1412, 486, 2797])\n", + "deriv_tensor (40, 81), indices tensor([ 61, 2908, 2169, ..., 1412, 486, 2797])\n", + "deriv_tensor (40, 81), indices tensor([1904, 2702, 2590, ..., 2624, 931, 1732])\n", + "deriv_tensor (40, 81), indices tensor([1904, 2702, 2590, ..., 2624, 931, 1732])\n", + "deriv_tensor (40, 81), indices tensor([1904, 2702, 2590, ..., 2624, 931, 1732])\n", + "deriv_tensor (40, 81), indices tensor([1904, 2702, 2590, ..., 2624, 931, 1732])\n", + "deriv_tensor (40, 81), indices tensor([ 133, 1447, 2389, ..., 1329, 1979, 50])\n", + "deriv_tensor (40, 81), indices tensor([ 133, 1447, 2389, ..., 1329, 1979, 50])\n", + "deriv_tensor (40, 81), indices tensor([ 133, 1447, 2389, ..., 1329, 1979, 50])\n", + "deriv_tensor (40, 81), indices tensor([ 133, 1447, 2389, ..., 1329, 1979, 50])\n", + "deriv_tensor (40, 81), indices tensor([3207, 130, 333, ..., 809, 1519, 1342])\n", 
+ "deriv_tensor (40, 81), indices tensor([3207, 130, 333, ..., 809, 1519, 1342])\n", + "deriv_tensor (40, 81), indices tensor([3207, 130, 333, ..., 809, 1519, 1342])\n", + "deriv_tensor (40, 81), indices tensor([3207, 130, 333, ..., 809, 1519, 1342])\n", + "deriv_tensor (40, 81), indices tensor([1134, 2481, 325, ..., 1172, 1198, 2348])\n", + "deriv_tensor (40, 81), indices tensor([1134, 2481, 325, ..., 1172, 1198, 2348])\n", + "deriv_tensor (40, 81), indices tensor([1134, 2481, 325, ..., 1172, 1198, 2348])\n", + "deriv_tensor (40, 81), indices tensor([1134, 2481, 325, ..., 1172, 1198, 2348])\n", + "deriv_tensor (40, 81), indices tensor([2577, 3116, 860, ..., 1012, 96, 529])\n", + "deriv_tensor (40, 81), indices tensor([2577, 3116, 860, ..., 1012, 96, 529])\n", + "deriv_tensor (40, 81), indices tensor([2577, 3116, 860, ..., 1012, 96, 529])\n", + "deriv_tensor (40, 81), indices tensor([2577, 3116, 860, ..., 1012, 96, 529])\n", + "deriv_tensor (40, 81), indices tensor([ 126, 2916, 1424, ..., 102, 2744, 2240])\n", + "deriv_tensor (40, 81), indices tensor([ 126, 2916, 1424, ..., 102, 2744, 2240])\n", + "deriv_tensor (40, 81), indices tensor([ 126, 2916, 1424, ..., 102, 2744, 2240])\n", + "deriv_tensor (40, 81), indices tensor([ 126, 2916, 1424, ..., 102, 2744, 2240])\n", + "deriv_tensor (40, 81), indices tensor([1065, 481, 162, ..., 512, 3134, 1383])\n", + "deriv_tensor (40, 81), indices tensor([1065, 481, 162, ..., 512, 3134, 1383])\n", + "deriv_tensor (40, 81), indices tensor([1065, 481, 162, ..., 512, 3134, 1383])\n", + "deriv_tensor (40, 81), indices tensor([1065, 481, 162, ..., 512, 3134, 1383])\n", + "deriv_tensor (40, 81), indices tensor([ 237, 1143, 379, ..., 1920, 1205, 1160])\n", + "deriv_tensor (40, 81), indices tensor([ 237, 1143, 379, ..., 1920, 1205, 1160])\n", + "deriv_tensor (40, 81), indices tensor([ 237, 1143, 379, ..., 1920, 1205, 1160])\n", + "deriv_tensor (40, 81), indices tensor([ 237, 1143, 379, ..., 1920, 1205, 1160])\n", + "deriv_tensor (40, 81), indices tensor([1440, 1747, 1900, ..., 1336, 3206, 1888])\n", + "deriv_tensor (40, 81), indices tensor([1440, 1747, 1900, ..., 1336, 3206, 1888])\n", + "deriv_tensor (40, 81), indices tensor([1440, 1747, 1900, ..., 1336, 3206, 1888])\n", + "deriv_tensor (40, 81), indices tensor([1440, 1747, 1900, ..., 1336, 3206, 1888])\n", + "deriv_tensor (40, 81), indices tensor([3145, 2973, 957, ..., 1660, 107, 2708])\n", + "deriv_tensor (40, 81), indices tensor([3145, 2973, 957, ..., 1660, 107, 2708])\n", + "deriv_tensor (40, 81), indices tensor([3145, 2973, 957, ..., 1660, 107, 2708])\n", + "deriv_tensor (40, 81), indices tensor([3145, 2973, 957, ..., 1660, 107, 2708])\n", + "deriv_tensor (40, 81), indices tensor([2920, 554, 507, ..., 1973, 1054, 41])\n", + "deriv_tensor (40, 81), indices tensor([2920, 554, 507, ..., 1973, 1054, 41])\n", + "deriv_tensor (40, 81), indices tensor([2920, 554, 507, ..., 1973, 1054, 41])\n", + "deriv_tensor (40, 81), indices tensor([2920, 554, 507, ..., 1973, 1054, 41])\n", + "deriv_tensor (40, 81), indices tensor([1486, 1160, 2119, ..., 604, 2003, 3068])\n", + "deriv_tensor (40, 81), indices tensor([1486, 1160, 2119, ..., 604, 2003, 3068])\n", + "deriv_tensor (40, 81), indices tensor([1486, 1160, 2119, ..., 604, 2003, 3068])\n", + "deriv_tensor (40, 81), indices tensor([1486, 1160, 2119, ..., 604, 2003, 3068])\n", + "deriv_tensor (40, 81), indices tensor([2222, 978, 2942, ..., 1371, 1145, 132])\n", + "deriv_tensor (40, 81), indices tensor([2222, 978, 2942, ..., 1371, 1145, 132])\n", + "deriv_tensor (40, 81), 
indices tensor([2222, 978, 2942, ..., 1371, 1145, 132])\n", + "deriv_tensor (40, 81), indices tensor([2222, 978, 2942, ..., 1371, 1145, 132])\n", + "deriv_tensor (40, 81), indices tensor([3208, 327, 1750, ..., 303, 2316, 1610])\n", + "deriv_tensor (40, 81), indices tensor([3208, 327, 1750, ..., 303, 2316, 1610])\n", + "deriv_tensor (40, 81), indices tensor([3208, 327, 1750, ..., 303, 2316, 1610])\n", + "deriv_tensor (40, 81), indices tensor([3208, 327, 1750, ..., 303, 2316, 1610])\n", + "deriv_tensor (40, 81), indices tensor([ 155, 512, 2158, ..., 893, 269, 2706])\n", + "deriv_tensor (40, 81), indices tensor([ 155, 512, 2158, ..., 893, 269, 2706])\n", + "deriv_tensor (40, 81), indices tensor([ 155, 512, 2158, ..., 893, 269, 2706])\n", + "deriv_tensor (40, 81), indices tensor([ 155, 512, 2158, ..., 893, 269, 2706])\n", + "deriv_tensor (40, 81), indices tensor([1704, 2478, 2403, ..., 849, 2830, 1334])\n", + "deriv_tensor (40, 81), indices tensor([1704, 2478, 2403, ..., 849, 2830, 1334])\n", + "deriv_tensor (40, 81), indices tensor([1704, 2478, 2403, ..., 849, 2830, 1334])\n", + "deriv_tensor (40, 81), indices tensor([1704, 2478, 2403, ..., 849, 2830, 1334])\n", + "deriv_tensor (40, 81), indices tensor([ 949, 1873, 917, ..., 1536, 1907, 2102])\n", + "deriv_tensor (40, 81), indices tensor([ 949, 1873, 917, ..., 1536, 1907, 2102])\n", + "deriv_tensor (40, 81), indices tensor([ 949, 1873, 917, ..., 1536, 1907, 2102])\n", + "deriv_tensor (40, 81), indices tensor([ 949, 1873, 917, ..., 1536, 1907, 2102])\n", + "deriv_tensor (40, 81), indices tensor([ 954, 685, 2896, ..., 2459, 2159, 1775])\n", + "deriv_tensor (40, 81), indices tensor([ 954, 685, 2896, ..., 2459, 2159, 1775])\n", + "deriv_tensor (40, 81), indices tensor([ 954, 685, 2896, ..., 2459, 2159, 1775])\n", + "deriv_tensor (40, 81), indices tensor([ 954, 685, 2896, ..., 2459, 2159, 1775])\n", + "deriv_tensor (40, 81), indices tensor([ 222, 2524, 1536, ..., 2720, 722, 754])\n", + "deriv_tensor (40, 81), indices tensor([ 222, 2524, 1536, ..., 2720, 722, 754])\n", + "deriv_tensor (40, 81), indices tensor([ 222, 2524, 1536, ..., 2720, 722, 754])\n", + "deriv_tensor (40, 81), indices tensor([ 222, 2524, 1536, ..., 2720, 722, 754])\n", + "deriv_tensor (40, 81), indices tensor([2969, 2923, 1011, ..., 1472, 2247, 2071])\n", + "deriv_tensor (40, 81), indices tensor([2969, 2923, 1011, ..., 1472, 2247, 2071])\n", + "deriv_tensor (40, 81), indices tensor([2969, 2923, 1011, ..., 1472, 2247, 2071])\n", + "deriv_tensor (40, 81), indices tensor([2969, 2923, 1011, ..., 1472, 2247, 2071])\n", + "deriv_tensor (40, 81), indices tensor([ 369, 1368, 2818, ..., 1784, 625, 1779])\n", + "deriv_tensor (40, 81), indices tensor([ 369, 1368, 2818, ..., 1784, 625, 1779])\n", + "deriv_tensor (40, 81), indices tensor([ 369, 1368, 2818, ..., 1784, 625, 1779])\n", + "deriv_tensor (40, 81), indices tensor([ 369, 1368, 2818, ..., 1784, 625, 1779])\n", + "deriv_tensor (40, 81), indices tensor([1250, 1696, 569, ..., 12, 2226, 801])\n", + "deriv_tensor (40, 81), indices tensor([1250, 1696, 569, ..., 12, 2226, 801])\n", + "deriv_tensor (40, 81), indices tensor([1250, 1696, 569, ..., 12, 2226, 801])\n", + "deriv_tensor (40, 81), indices tensor([1250, 1696, 569, ..., 12, 2226, 801])\n", + "deriv_tensor (40, 81), indices tensor([ 920, 2074, 2908, ..., 1410, 407, 1004])\n", + "deriv_tensor (40, 81), indices tensor([ 920, 2074, 2908, ..., 1410, 407, 1004])\n", + "deriv_tensor (40, 81), indices tensor([ 920, 2074, 2908, ..., 1410, 407, 1004])\n", + "deriv_tensor (40, 81), indices 
tensor([ 920, 2074, 2908, ..., 1410, 407, 1004])\n", + "deriv_tensor (40, 81), indices tensor([ 511, 1148, 801, ..., 2896, 554, 2573])\n", + "deriv_tensor (40, 81), indices tensor([ 511, 1148, 801, ..., 2896, 554, 2573])\n", + "deriv_tensor (40, 81), indices tensor([ 511, 1148, 801, ..., 2896, 554, 2573])\n", + "deriv_tensor (40, 81), indices tensor([ 511, 1148, 801, ..., 2896, 554, 2573])\n", + "deriv_tensor (40, 81), indices tensor([2475, 2789, 2263, ..., 590, 1906, 917])\n", + "deriv_tensor (40, 81), indices tensor([2475, 2789, 2263, ..., 590, 1906, 917])\n", + "deriv_tensor (40, 81), indices tensor([2475, 2789, 2263, ..., 590, 1906, 917])\n", + "deriv_tensor (40, 81), indices tensor([2475, 2789, 2263, ..., 590, 1906, 917])\n", + "deriv_tensor (40, 81), indices tensor([ 863, 1944, 2718, ..., 3116, 2417, 1158])\n", + "deriv_tensor (40, 81), indices tensor([ 863, 1944, 2718, ..., 3116, 2417, 1158])\n", + "deriv_tensor (40, 81), indices tensor([ 863, 1944, 2718, ..., 3116, 2417, 1158])\n", + "deriv_tensor (40, 81), indices tensor([ 863, 1944, 2718, ..., 3116, 2417, 1158])\n", + "deriv_tensor (40, 81), indices tensor([ 914, 306, 2295, ..., 402, 1246, 2757])\n", + "deriv_tensor (40, 81), indices tensor([ 914, 306, 2295, ..., 402, 1246, 2757])\n", + "deriv_tensor (40, 81), indices tensor([ 914, 306, 2295, ..., 402, 1246, 2757])\n", + "deriv_tensor (40, 81), indices tensor([ 914, 306, 2295, ..., 402, 1246, 2757])\n", + "deriv_tensor (40, 81), indices tensor([3214, 728, 1744, ..., 1164, 2073, 1885])\n", + "deriv_tensor (40, 81), indices tensor([3214, 728, 1744, ..., 1164, 2073, 1885])\n", + "deriv_tensor (40, 81), indices tensor([3214, 728, 1744, ..., 1164, 2073, 1885])\n", + "deriv_tensor (40, 81), indices tensor([3214, 728, 1744, ..., 1164, 2073, 1885])\n", + "deriv_tensor (40, 81), indices tensor([ 125, 1208, 1462, ..., 2912, 1008, 1855])\n", + "deriv_tensor (40, 81), indices tensor([ 125, 1208, 1462, ..., 2912, 1008, 1855])\n", + "deriv_tensor (40, 81), indices tensor([ 125, 1208, 1462, ..., 2912, 1008, 1855])\n", + "deriv_tensor (40, 81), indices tensor([ 125, 1208, 1462, ..., 2912, 1008, 1855])\n", + "deriv_tensor (40, 81), indices tensor([ 17, 1657, 821, ..., 2046, 2348, 1511])\n", + "deriv_tensor (40, 81), indices tensor([ 17, 1657, 821, ..., 2046, 2348, 1511])\n", + "deriv_tensor (40, 81), indices tensor([ 17, 1657, 821, ..., 2046, 2348, 1511])\n", + "deriv_tensor (40, 81), indices tensor([ 17, 1657, 821, ..., 2046, 2348, 1511])\n", + "deriv_tensor (40, 81), indices tensor([2174, 577, 1280, ..., 2637, 2444, 1380])\n", + "deriv_tensor (40, 81), indices tensor([2174, 577, 1280, ..., 2637, 2444, 1380])\n", + "deriv_tensor (40, 81), indices tensor([2174, 577, 1280, ..., 2637, 2444, 1380])\n", + "deriv_tensor (40, 81), indices tensor([2174, 577, 1280, ..., 2637, 2444, 1380])\n", + "deriv_tensor (40, 81), indices tensor([ 362, 2385, 844, ..., 438, 3187, 2863])\n", + "deriv_tensor (40, 81), indices tensor([ 362, 2385, 844, ..., 438, 3187, 2863])\n", + "deriv_tensor (40, 81), indices tensor([ 362, 2385, 844, ..., 438, 3187, 2863])\n", + "deriv_tensor (40, 81), indices tensor([ 362, 2385, 844, ..., 438, 3187, 2863])\n", + "deriv_tensor (40, 81), indices tensor([2067, 2640, 2824, ..., 1630, 1465, 775])\n", + "deriv_tensor (40, 81), indices tensor([2067, 2640, 2824, ..., 1630, 1465, 775])\n", + "deriv_tensor (40, 81), indices tensor([2067, 2640, 2824, ..., 1630, 1465, 775])\n", + "deriv_tensor (40, 81), indices tensor([2067, 2640, 2824, ..., 1630, 1465, 775])\n", + "deriv_tensor (40, 81), indices 
tensor([2414, 139, 805, ..., 1572, 1877, 2919])\n", + "deriv_tensor (40, 81), indices tensor([2414, 139, 805, ..., 1572, 1877, 2919])\n", + "deriv_tensor (40, 81), indices tensor([2414, 139, 805, ..., 1572, 1877, 2919])\n", + "deriv_tensor (40, 81), indices tensor([2414, 139, 805, ..., 1572, 1877, 2919])\n", + "deriv_tensor (40, 81), indices tensor([2794, 1820, 2567, ..., 614, 2388, 1367])\n", + "deriv_tensor (40, 81), indices tensor([2794, 1820, 2567, ..., 614, 2388, 1367])\n", + "deriv_tensor (40, 81), indices tensor([2794, 1820, 2567, ..., 614, 2388, 1367])\n", + "deriv_tensor (40, 81), indices tensor([2794, 1820, 2567, ..., 614, 2388, 1367])\n", + "deriv_tensor (40, 81), indices tensor([ 651, 2785, 1951, ..., 955, 1037, 887])\n", + "deriv_tensor (40, 81), indices tensor([ 651, 2785, 1951, ..., 955, 1037, 887])\n", + "deriv_tensor (40, 81), indices tensor([ 651, 2785, 1951, ..., 955, 1037, 887])\n", + "deriv_tensor (40, 81), indices tensor([ 651, 2785, 1951, ..., 955, 1037, 887])\n", + "deriv_tensor (40, 81), indices tensor([ 120, 172, 608, ..., 1269, 851, 954])\n", + "deriv_tensor (40, 81), indices tensor([ 120, 172, 608, ..., 1269, 851, 954])\n", + "deriv_tensor (40, 81), indices tensor([ 120, 172, 608, ..., 1269, 851, 954])\n", + "deriv_tensor (40, 81), indices tensor([ 120, 172, 608, ..., 1269, 851, 954])\n", + "deriv_tensor (40, 81), indices tensor([1071, 2043, 2653, ..., 1373, 2754, 2769])\n", + "deriv_tensor (40, 81), indices tensor([1071, 2043, 2653, ..., 1373, 2754, 2769])\n", + "deriv_tensor (40, 81), indices tensor([1071, 2043, 2653, ..., 1373, 2754, 2769])\n", + "deriv_tensor (40, 81), indices tensor([1071, 2043, 2653, ..., 1373, 2754, 2769])\n", + "deriv_tensor (40, 81), indices tensor([1776, 2637, 1641, ..., 2661, 2495, 179])\n", + "deriv_tensor (40, 81), indices tensor([1776, 2637, 1641, ..., 2661, 2495, 179])\n", + "deriv_tensor (40, 81), indices tensor([1776, 2637, 1641, ..., 2661, 2495, 179])\n", + "deriv_tensor (40, 81), indices tensor([1776, 2637, 1641, ..., 2661, 2495, 179])\n", + "deriv_tensor (40, 81), indices tensor([ 24, 916, 2174, ..., 1398, 17, 3023])\n", + "deriv_tensor (40, 81), indices tensor([ 24, 916, 2174, ..., 1398, 17, 3023])\n", + "deriv_tensor (40, 81), indices tensor([ 24, 916, 2174, ..., 1398, 17, 3023])\n", + "deriv_tensor (40, 81), indices tensor([ 24, 916, 2174, ..., 1398, 17, 3023])\n", + "deriv_tensor (40, 81), indices tensor([ 484, 1043, 1780, ..., 668, 769, 2337])\n", + "deriv_tensor (40, 81), indices tensor([ 484, 1043, 1780, ..., 668, 769, 2337])\n", + "deriv_tensor (40, 81), indices tensor([ 484, 1043, 1780, ..., 668, 769, 2337])\n", + "deriv_tensor (40, 81), indices tensor([ 484, 1043, 1780, ..., 668, 769, 2337])\n", + "deriv_tensor (40, 81), indices tensor([1569, 1764, 2433, ..., 330, 2285, 1053])\n", + "deriv_tensor (40, 81), indices tensor([1569, 1764, 2433, ..., 330, 2285, 1053])\n", + "deriv_tensor (40, 81), indices tensor([1569, 1764, 2433, ..., 330, 2285, 1053])\n", + "deriv_tensor (40, 81), indices tensor([1569, 1764, 2433, ..., 330, 2285, 1053])\n", + "deriv_tensor (40, 81), indices tensor([2882, 1316, 290, ..., 2098, 659, 3201])\n", + "deriv_tensor (40, 81), indices tensor([2882, 1316, 290, ..., 2098, 659, 3201])\n", + "deriv_tensor (40, 81), indices tensor([2882, 1316, 290, ..., 2098, 659, 3201])\n", + "deriv_tensor (40, 81), indices tensor([2882, 1316, 290, ..., 2098, 659, 3201])\n", + "deriv_tensor (40, 81), indices tensor([2932, 1139, 2363, ..., 1487, 354, 1070])\n", + "deriv_tensor (40, 81), indices tensor([2932, 1139, 
2363, ..., 1487, 354, 1070])\n", + "deriv_tensor (40, 81), indices tensor([2932, 1139, 2363, ..., 1487, 354, 1070])\n", + "deriv_tensor (40, 81), indices tensor([2932, 1139, 2363, ..., 1487, 354, 1070])\n", + "deriv_tensor (40, 81), indices tensor([ 65, 2891, 1478, ..., 2564, 2731, 2009])\n", + "deriv_tensor (40, 81), indices tensor([ 65, 2891, 1478, ..., 2564, 2731, 2009])\n", + "deriv_tensor (40, 81), indices tensor([ 65, 2891, 1478, ..., 2564, 2731, 2009])\n", + "deriv_tensor (40, 81), indices tensor([ 65, 2891, 1478, ..., 2564, 2731, 2009])\n", + "deriv_tensor (40, 81), indices tensor([ 826, 963, 2893, ..., 2937, 2717, 145])\n", + "deriv_tensor (40, 81), indices tensor([ 826, 963, 2893, ..., 2937, 2717, 145])\n", + "deriv_tensor (40, 81), indices tensor([ 826, 963, 2893, ..., 2937, 2717, 145])\n", + "deriv_tensor (40, 81), indices tensor([ 826, 963, 2893, ..., 2937, 2717, 145])\n", + "deriv_tensor (40, 81), indices tensor([ 286, 2085, 2007, ..., 1492, 3195, 234])\n", + "deriv_tensor (40, 81), indices tensor([ 286, 2085, 2007, ..., 1492, 3195, 234])\n", + "deriv_tensor (40, 81), indices tensor([ 286, 2085, 2007, ..., 1492, 3195, 234])\n", + "deriv_tensor (40, 81), indices tensor([ 286, 2085, 2007, ..., 1492, 3195, 234])\n", + "deriv_tensor (40, 81), indices tensor([2176, 2334, 2299, ..., 1677, 3083, 1036])\n", + "deriv_tensor (40, 81), indices tensor([2176, 2334, 2299, ..., 1677, 3083, 1036])\n", + "deriv_tensor (40, 81), indices tensor([2176, 2334, 2299, ..., 1677, 3083, 1036])\n", + "deriv_tensor (40, 81), indices tensor([2176, 2334, 2299, ..., 1677, 3083, 1036])\n", + "deriv_tensor (40, 81), indices tensor([1725, 1095, 2950, ..., 2735, 1255, 283])\n", + "deriv_tensor (40, 81), indices tensor([1725, 1095, 2950, ..., 2735, 1255, 283])\n", + "deriv_tensor (40, 81), indices tensor([1725, 1095, 2950, ..., 2735, 1255, 283])\n", + "deriv_tensor (40, 81), indices tensor([1725, 1095, 2950, ..., 2735, 1255, 283])\n", + "deriv_tensor (40, 81), indices tensor([2040, 441, 13, ..., 315, 809, 1790])\n", + "deriv_tensor (40, 81), indices tensor([2040, 441, 13, ..., 315, 809, 1790])\n", + "deriv_tensor (40, 81), indices tensor([2040, 441, 13, ..., 315, 809, 1790])\n", + "deriv_tensor (40, 81), indices tensor([2040, 441, 13, ..., 315, 809, 1790])\n", + "deriv_tensor (40, 81), indices tensor([2878, 565, 1561, ..., 868, 382, 2397])\n", + "deriv_tensor (40, 81), indices tensor([2878, 565, 1561, ..., 868, 382, 2397])\n", + "deriv_tensor (40, 81), indices tensor([2878, 565, 1561, ..., 868, 382, 2397])\n", + "deriv_tensor (40, 81), indices tensor([2878, 565, 1561, ..., 868, 382, 2397])\n", + "deriv_tensor (40, 81), indices tensor([2666, 2133, 2443, ..., 2081, 3006, 2058])\n", + "deriv_tensor (40, 81), indices tensor([2666, 2133, 2443, ..., 2081, 3006, 2058])\n", + "deriv_tensor (40, 81), indices tensor([2666, 2133, 2443, ..., 2081, 3006, 2058])\n", + "deriv_tensor (40, 81), indices tensor([2666, 2133, 2443, ..., 2081, 3006, 2058])\n", + "deriv_tensor (40, 81), indices tensor([ 681, 168, 2165, ..., 1533, 28, 1505])\n", + "deriv_tensor (40, 81), indices tensor([ 681, 168, 2165, ..., 1533, 28, 1505])\n", + "deriv_tensor (40, 81), indices tensor([ 681, 168, 2165, ..., 1533, 28, 1505])\n", + "deriv_tensor (40, 81), indices tensor([ 681, 168, 2165, ..., 1533, 28, 1505])\n", + "deriv_tensor (40, 81), indices tensor([2891, 2233, 650, ..., 2530, 2039, 1941])\n", + "deriv_tensor (40, 81), indices tensor([2891, 2233, 650, ..., 2530, 2039, 1941])\n", + "deriv_tensor (40, 81), indices tensor([2891, 2233, 650, ..., 2530, 
2039, 1941])\n", + "deriv_tensor (40, 81), indices tensor([2891, 2233, 650, ..., 2530, 2039, 1941])\n", + "deriv_tensor (40, 81), indices tensor([2765, 1218, 150, ..., 217, 287, 252])\n", + "deriv_tensor (40, 81), indices tensor([2765, 1218, 150, ..., 217, 287, 252])\n", + "deriv_tensor (40, 81), indices tensor([2765, 1218, 150, ..., 217, 287, 252])\n", + "deriv_tensor (40, 81), indices tensor([2765, 1218, 150, ..., 217, 287, 252])\n", + "deriv_tensor (40, 81), indices tensor([3104, 2660, 751, ..., 1057, 2541, 555])\n", + "deriv_tensor (40, 81), indices tensor([3104, 2660, 751, ..., 1057, 2541, 555])\n", + "deriv_tensor (40, 81), indices tensor([3104, 2660, 751, ..., 1057, 2541, 555])\n", + "deriv_tensor (40, 81), indices tensor([3104, 2660, 751, ..., 1057, 2541, 555])\n", + "deriv_tensor (40, 81), indices tensor([2182, 1448, 3102, ..., 2101, 1084, 1729])\n", + "deriv_tensor (40, 81), indices tensor([2182, 1448, 3102, ..., 2101, 1084, 1729])\n", + "deriv_tensor (40, 81), indices tensor([2182, 1448, 3102, ..., 2101, 1084, 1729])\n", + "deriv_tensor (40, 81), indices tensor([2182, 1448, 3102, ..., 2101, 1084, 1729])\n", + "deriv_tensor (40, 81), indices tensor([ 596, 2712, 1390, ..., 2872, 2741, 1460])\n", + "deriv_tensor (40, 81), indices tensor([ 596, 2712, 1390, ..., 2872, 2741, 1460])\n", + "deriv_tensor (40, 81), indices tensor([ 596, 2712, 1390, ..., 2872, 2741, 1460])\n", + "deriv_tensor (40, 81), indices tensor([ 596, 2712, 1390, ..., 2872, 2741, 1460])\n", + "deriv_tensor (40, 81), indices tensor([3070, 996, 2559, ..., 197, 1994, 2293])\n", + "deriv_tensor (40, 81), indices tensor([3070, 996, 2559, ..., 197, 1994, 2293])\n", + "deriv_tensor (40, 81), indices tensor([3070, 996, 2559, ..., 197, 1994, 2293])\n", + "deriv_tensor (40, 81), indices tensor([3070, 996, 2559, ..., 197, 1994, 2293])\n", + "deriv_tensor (40, 81), indices tensor([ 72, 2862, 925, ..., 1543, 1181, 1441])\n", + "deriv_tensor (40, 81), indices tensor([ 72, 2862, 925, ..., 1543, 1181, 1441])\n", + "deriv_tensor (40, 81), indices tensor([ 72, 2862, 925, ..., 1543, 1181, 1441])\n", + "deriv_tensor (40, 81), indices tensor([ 72, 2862, 925, ..., 1543, 1181, 1441])\n", + "deriv_tensor (40, 81), indices tensor([1193, 1281, 1679, ..., 883, 2297, 1887])\n", + "deriv_tensor (40, 81), indices tensor([1193, 1281, 1679, ..., 883, 2297, 1887])\n", + "deriv_tensor (40, 81), indices tensor([1193, 1281, 1679, ..., 883, 2297, 1887])\n", + "deriv_tensor (40, 81), indices tensor([1193, 1281, 1679, ..., 883, 2297, 1887])\n", + "deriv_tensor (40, 81), indices tensor([1999, 2787, 3188, ..., 1207, 2566, 2562])\n", + "deriv_tensor (40, 81), indices tensor([1999, 2787, 3188, ..., 1207, 2566, 2562])\n", + "deriv_tensor (40, 81), indices tensor([1999, 2787, 3188, ..., 1207, 2566, 2562])\n", + "deriv_tensor (40, 81), indices tensor([1999, 2787, 3188, ..., 1207, 2566, 2562])\n", + "deriv_tensor (40, 81), indices tensor([2728, 2460, 2205, ..., 1476, 1013, 2019])\n", + "deriv_tensor (40, 81), indices tensor([2728, 2460, 2205, ..., 1476, 1013, 2019])\n", + "deriv_tensor (40, 81), indices tensor([2728, 2460, 2205, ..., 1476, 1013, 2019])\n", + "deriv_tensor (40, 81), indices tensor([2728, 2460, 2205, ..., 1476, 1013, 2019])\n", + "deriv_tensor (40, 81), indices tensor([2386, 1674, 2956, ..., 3235, 773, 1637])\n", + "deriv_tensor (40, 81), indices tensor([2386, 1674, 2956, ..., 3235, 773, 1637])\n", + "deriv_tensor (40, 81), indices tensor([2386, 1674, 2956, ..., 3235, 773, 1637])\n", + "deriv_tensor (40, 81), indices tensor([2386, 1674, 2956, ..., 
3235, 773, 1637])\n", + "deriv_tensor (40, 81), indices tensor([ 896, 1481, 535, ..., 2246, 1432, 1654])\n", + "deriv_tensor (40, 81), indices tensor([ 896, 1481, 535, ..., 2246, 1432, 1654])\n", + "deriv_tensor (40, 81), indices tensor([ 896, 1481, 535, ..., 2246, 1432, 1654])\n", + "deriv_tensor (40, 81), indices tensor([ 896, 1481, 535, ..., 2246, 1432, 1654])\n", + "deriv_tensor (40, 81), indices tensor([ 867, 2600, 1295, ..., 1517, 2200, 96])\n", + "deriv_tensor (40, 81), indices tensor([ 867, 2600, 1295, ..., 1517, 2200, 96])\n", + "deriv_tensor (40, 81), indices tensor([ 867, 2600, 1295, ..., 1517, 2200, 96])\n", + "deriv_tensor (40, 81), indices tensor([ 867, 2600, 1295, ..., 1517, 2200, 96])\n", + "deriv_tensor (40, 81), indices tensor([ 611, 1002, 2133, ..., 189, 490, 3166])\n", + "deriv_tensor (40, 81), indices tensor([ 611, 1002, 2133, ..., 189, 490, 3166])\n", + "deriv_tensor (40, 81), indices tensor([ 611, 1002, 2133, ..., 189, 490, 3166])\n", + "deriv_tensor (40, 81), indices tensor([ 611, 1002, 2133, ..., 189, 490, 3166])\n", + "deriv_tensor (40, 81), indices tensor([ 449, 2361, 483, ..., 2912, 3190, 1804])\n", + "deriv_tensor (40, 81), indices tensor([ 449, 2361, 483, ..., 2912, 3190, 1804])\n", + "deriv_tensor (40, 81), indices tensor([ 449, 2361, 483, ..., 2912, 3190, 1804])\n", + "deriv_tensor (40, 81), indices tensor([ 449, 2361, 483, ..., 2912, 3190, 1804])\n", + "deriv_tensor (40, 81), indices tensor([1680, 181, 2498, ..., 3008, 890, 1858])\n", + "deriv_tensor (40, 81), indices tensor([1680, 181, 2498, ..., 3008, 890, 1858])\n", + "deriv_tensor (40, 81), indices tensor([1680, 181, 2498, ..., 3008, 890, 1858])\n", + "deriv_tensor (40, 81), indices tensor([1680, 181, 2498, ..., 3008, 890, 1858])\n", + "deriv_tensor (40, 81), indices tensor([1275, 1959, 304, ..., 2815, 535, 1312])\n", + "deriv_tensor (40, 81), indices tensor([1275, 1959, 304, ..., 2815, 535, 1312])\n", + "deriv_tensor (40, 81), indices tensor([1275, 1959, 304, ..., 2815, 535, 1312])\n", + "deriv_tensor (40, 81), indices tensor([1275, 1959, 304, ..., 2815, 535, 1312])\n", + "deriv_tensor (40, 81), indices tensor([ 243, 1047, 2494, ..., 1904, 2361, 566])\n", + "deriv_tensor (40, 81), indices tensor([ 243, 1047, 2494, ..., 1904, 2361, 566])\n", + "deriv_tensor (40, 81), indices tensor([ 243, 1047, 2494, ..., 1904, 2361, 566])\n", + "deriv_tensor (40, 81), indices tensor([ 243, 1047, 2494, ..., 1904, 2361, 566])\n", + "deriv_tensor (40, 81), indices tensor([1427, 2698, 1406, ..., 3120, 1338, 2753])\n", + "deriv_tensor (40, 81), indices tensor([1427, 2698, 1406, ..., 3120, 1338, 2753])\n", + "deriv_tensor (40, 81), indices tensor([1427, 2698, 1406, ..., 3120, 1338, 2753])\n", + "deriv_tensor (40, 81), indices tensor([1427, 2698, 1406, ..., 3120, 1338, 2753])\n", + "deriv_tensor (40, 81), indices tensor([ 17, 405, 1800, ..., 3042, 101, 3210])\n", + "deriv_tensor (40, 81), indices tensor([ 17, 405, 1800, ..., 3042, 101, 3210])\n", + "deriv_tensor (40, 81), indices tensor([ 17, 405, 1800, ..., 3042, 101, 3210])\n", + "deriv_tensor (40, 81), indices tensor([ 17, 405, 1800, ..., 3042, 101, 3210])\n", + "deriv_tensor (40, 81), indices tensor([1530, 276, 3131, ..., 279, 1292, 1807])\n", + "deriv_tensor (40, 81), indices tensor([1530, 276, 3131, ..., 279, 1292, 1807])\n", + "deriv_tensor (40, 81), indices tensor([1530, 276, 3131, ..., 279, 1292, 1807])\n", + "deriv_tensor (40, 81), indices tensor([1530, 276, 3131, ..., 279, 1292, 1807])\n", + "deriv_tensor (40, 81), indices tensor([2020, 118, 367, ..., 2231, 2052, 
1897])\n", + "deriv_tensor (40, 81), indices tensor([2020, 118, 367, ..., 2231, 2052, 1897])\n", + "deriv_tensor (40, 81), indices tensor([2020, 118, 367, ..., 2231, 2052, 1897])\n", + "deriv_tensor (40, 81), indices tensor([2020, 118, 367, ..., 2231, 2052, 1897])\n", + "deriv_tensor (40, 81), indices tensor([1526, 2125, 1712, ..., 1982, 1815, 1927])\n", + "deriv_tensor (40, 81), indices tensor([1526, 2125, 1712, ..., 1982, 1815, 1927])\n", + "deriv_tensor (40, 81), indices tensor([1526, 2125, 1712, ..., 1982, 1815, 1927])\n", + "deriv_tensor (40, 81), indices tensor([1526, 2125, 1712, ..., 1982, 1815, 1927])\n", + "deriv_tensor (40, 81), indices tensor([1577, 685, 2076, ..., 2837, 2714, 2666])\n", + "deriv_tensor (40, 81), indices tensor([1577, 685, 2076, ..., 2837, 2714, 2666])\n", + "deriv_tensor (40, 81), indices tensor([1577, 685, 2076, ..., 2837, 2714, 2666])\n", + "deriv_tensor (40, 81), indices tensor([1577, 685, 2076, ..., 2837, 2714, 2666])\n", + "deriv_tensor (40, 81), indices tensor([ 501, 2616, 3204, ..., 2093, 1582, 1624])\n", + "deriv_tensor (40, 81), indices tensor([ 501, 2616, 3204, ..., 2093, 1582, 1624])\n", + "deriv_tensor (40, 81), indices tensor([ 501, 2616, 3204, ..., 2093, 1582, 1624])\n", + "deriv_tensor (40, 81), indices tensor([ 501, 2616, 3204, ..., 2093, 1582, 1624])\n", + "deriv_tensor (40, 81), indices tensor([2378, 3194, 788, ..., 364, 809, 1797])\n", + "deriv_tensor (40, 81), indices tensor([2378, 3194, 788, ..., 364, 809, 1797])\n", + "deriv_tensor (40, 81), indices tensor([2378, 3194, 788, ..., 364, 809, 1797])\n", + "deriv_tensor (40, 81), indices tensor([2378, 3194, 788, ..., 364, 809, 1797])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 2975, 1249, ..., 1578, 625, 740])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 2975, 1249, ..., 1578, 625, 740])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 2975, 1249, ..., 1578, 625, 740])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 2975, 1249, ..., 1578, 625, 740])\n", + "deriv_tensor (40, 81), indices tensor([ 785, 2687, 3223, ..., 165, 1008, 1779])\n", + "deriv_tensor (40, 81), indices tensor([ 785, 2687, 3223, ..., 165, 1008, 1779])\n", + "deriv_tensor (40, 81), indices tensor([ 785, 2687, 3223, ..., 165, 1008, 1779])\n", + "deriv_tensor (40, 81), indices tensor([ 785, 2687, 3223, ..., 165, 1008, 1779])\n", + "deriv_tensor (40, 81), indices tensor([ 168, 3011, 2099, ..., 2384, 1347, 3026])\n", + "deriv_tensor (40, 81), indices tensor([ 168, 3011, 2099, ..., 2384, 1347, 3026])\n", + "deriv_tensor (40, 81), indices tensor([ 168, 3011, 2099, ..., 2384, 1347, 3026])\n", + "deriv_tensor (40, 81), indices tensor([ 168, 3011, 2099, ..., 2384, 1347, 3026])\n", + "deriv_tensor (40, 81), indices tensor([2635, 788, 1320, ..., 2801, 726, 2141])\n", + "deriv_tensor (40, 81), indices tensor([2635, 788, 1320, ..., 2801, 726, 2141])\n", + "deriv_tensor (40, 81), indices tensor([2635, 788, 1320, ..., 2801, 726, 2141])\n", + "deriv_tensor (40, 81), indices tensor([2635, 788, 1320, ..., 2801, 726, 2141])\n", + "deriv_tensor (40, 81), indices tensor([1553, 538, 921, ..., 1671, 2856, 2982])\n", + "deriv_tensor (40, 81), indices tensor([1553, 538, 921, ..., 1671, 2856, 2982])\n", + "deriv_tensor (40, 81), indices tensor([1553, 538, 921, ..., 1671, 2856, 2982])\n", + "deriv_tensor (40, 81), indices tensor([1553, 538, 921, ..., 1671, 2856, 2982])\n", + "deriv_tensor (40, 81), indices tensor([ 917, 62, 841, ..., 747, 2156, 3084])\n", + "deriv_tensor (40, 81), indices tensor([ 917, 62, 841, ..., 747, 2156, 
3084])\n", + "deriv_tensor (40, 81), indices tensor([ 917, 62, 841, ..., 747, 2156, 3084])\n", + "deriv_tensor (40, 81), indices tensor([ 917, 62, 841, ..., 747, 2156, 3084])\n", + "deriv_tensor (40, 81), indices tensor([2402, 1612, 2638, ..., 3112, 2722, 711])\n", + "deriv_tensor (40, 81), indices tensor([2402, 1612, 2638, ..., 3112, 2722, 711])\n", + "deriv_tensor (40, 81), indices tensor([2402, 1612, 2638, ..., 3112, 2722, 711])\n", + "deriv_tensor (40, 81), indices tensor([2402, 1612, 2638, ..., 3112, 2722, 711])\n", + "deriv_tensor (40, 81), indices tensor([3157, 1454, 2163, ..., 1890, 1490, 417])\n", + "deriv_tensor (40, 81), indices tensor([3157, 1454, 2163, ..., 1890, 1490, 417])\n", + "deriv_tensor (40, 81), indices tensor([3157, 1454, 2163, ..., 1890, 1490, 417])\n", + "deriv_tensor (40, 81), indices tensor([3157, 1454, 2163, ..., 1890, 1490, 417])\n", + "deriv_tensor (40, 81), indices tensor([2047, 370, 300, ..., 3033, 1477, 1978])\n", + "deriv_tensor (40, 81), indices tensor([2047, 370, 300, ..., 3033, 1477, 1978])\n", + "deriv_tensor (40, 81), indices tensor([2047, 370, 300, ..., 3033, 1477, 1978])\n", + "deriv_tensor (40, 81), indices tensor([2047, 370, 300, ..., 3033, 1477, 1978])\n", + "deriv_tensor (40, 81), indices tensor([ 904, 2456, 2056, ..., 2268, 1207, 1096])\n", + "deriv_tensor (40, 81), indices tensor([ 904, 2456, 2056, ..., 2268, 1207, 1096])\n", + "deriv_tensor (40, 81), indices tensor([ 904, 2456, 2056, ..., 2268, 1207, 1096])\n", + "deriv_tensor (40, 81), indices tensor([ 904, 2456, 2056, ..., 2268, 1207, 1096])\n", + "deriv_tensor (40, 81), indices tensor([2589, 2334, 2191, ..., 545, 2534, 378])\n", + "deriv_tensor (40, 81), indices tensor([2589, 2334, 2191, ..., 545, 2534, 378])\n", + "deriv_tensor (40, 81), indices tensor([2589, 2334, 2191, ..., 545, 2534, 378])\n", + "deriv_tensor (40, 81), indices tensor([2589, 2334, 2191, ..., 545, 2534, 378])\n", + "deriv_tensor (40, 81), indices tensor([1801, 1554, 1510, ..., 208, 2122, 1273])\n", + "deriv_tensor (40, 81), indices tensor([1801, 1554, 1510, ..., 208, 2122, 1273])\n", + "deriv_tensor (40, 81), indices tensor([1801, 1554, 1510, ..., 208, 2122, 1273])\n", + "deriv_tensor (40, 81), indices tensor([1801, 1554, 1510, ..., 208, 2122, 1273])\n", + "deriv_tensor (40, 81), indices tensor([1634, 829, 1233, ..., 140, 2176, 3012])\n", + "deriv_tensor (40, 81), indices tensor([1634, 829, 1233, ..., 140, 2176, 3012])\n", + "deriv_tensor (40, 81), indices tensor([1634, 829, 1233, ..., 140, 2176, 3012])\n", + "deriv_tensor (40, 81), indices tensor([1634, 829, 1233, ..., 140, 2176, 3012])\n", + "deriv_tensor (40, 81), indices tensor([2047, 493, 3051, ..., 620, 852, 1834])\n", + "deriv_tensor (40, 81), indices tensor([2047, 493, 3051, ..., 620, 852, 1834])\n", + "deriv_tensor (40, 81), indices tensor([2047, 493, 3051, ..., 620, 852, 1834])\n", + "deriv_tensor (40, 81), indices tensor([2047, 493, 3051, ..., 620, 852, 1834])\n", + "deriv_tensor (40, 81), indices tensor([1226, 568, 1571, ..., 375, 3198, 476])\n", + "deriv_tensor (40, 81), indices tensor([1226, 568, 1571, ..., 375, 3198, 476])\n", + "deriv_tensor (40, 81), indices tensor([1226, 568, 1571, ..., 375, 3198, 476])\n", + "deriv_tensor (40, 81), indices tensor([1226, 568, 1571, ..., 375, 3198, 476])\n", + "deriv_tensor (40, 81), indices tensor([1037, 1773, 107, ..., 411, 2613, 3205])\n", + "deriv_tensor (40, 81), indices tensor([1037, 1773, 107, ..., 411, 2613, 3205])\n", + "deriv_tensor (40, 81), indices tensor([1037, 1773, 107, ..., 411, 2613, 3205])\n", + 
"deriv_tensor (40, 81), indices tensor([1037, 1773, 107, ..., 411, 2613, 3205])\n", + "deriv_tensor (40, 81), indices tensor([2586, 1596, 881, ..., 191, 3088, 1651])\n", + "deriv_tensor (40, 81), indices tensor([2586, 1596, 881, ..., 191, 3088, 1651])\n", + "deriv_tensor (40, 81), indices tensor([2586, 1596, 881, ..., 191, 3088, 1651])\n", + "deriv_tensor (40, 81), indices tensor([2586, 1596, 881, ..., 191, 3088, 1651])\n", + "deriv_tensor (40, 81), indices tensor([2855, 2350, 709, ..., 145, 2490, 1712])\n", + "deriv_tensor (40, 81), indices tensor([2855, 2350, 709, ..., 145, 2490, 1712])\n", + "deriv_tensor (40, 81), indices tensor([2855, 2350, 709, ..., 145, 2490, 1712])\n", + "deriv_tensor (40, 81), indices tensor([2855, 2350, 709, ..., 145, 2490, 1712])\n", + "deriv_tensor (40, 81), indices tensor([2475, 601, 388, ..., 2776, 2955, 860])\n", + "deriv_tensor (40, 81), indices tensor([2475, 601, 388, ..., 2776, 2955, 860])\n", + "deriv_tensor (40, 81), indices tensor([2475, 601, 388, ..., 2776, 2955, 860])\n", + "deriv_tensor (40, 81), indices tensor([2475, 601, 388, ..., 2776, 2955, 860])\n", + "deriv_tensor (40, 81), indices tensor([2735, 286, 1087, ..., 857, 1236, 3050])\n", + "deriv_tensor (40, 81), indices tensor([2735, 286, 1087, ..., 857, 1236, 3050])\n", + "deriv_tensor (40, 81), indices tensor([2735, 286, 1087, ..., 857, 1236, 3050])\n", + "deriv_tensor (40, 81), indices tensor([2735, 286, 1087, ..., 857, 1236, 3050])\n", + "deriv_tensor (40, 81), indices tensor([1143, 2224, 226, ..., 382, 2572, 258])\n", + "deriv_tensor (40, 81), indices tensor([1143, 2224, 226, ..., 382, 2572, 258])\n", + "deriv_tensor (40, 81), indices tensor([1143, 2224, 226, ..., 382, 2572, 258])\n", + "deriv_tensor (40, 81), indices tensor([1143, 2224, 226, ..., 382, 2572, 258])\n", + "deriv_tensor (40, 81), indices tensor([2435, 639, 396, ..., 1847, 1818, 1296])\n", + "deriv_tensor (40, 81), indices tensor([2435, 639, 396, ..., 1847, 1818, 1296])\n", + "deriv_tensor (40, 81), indices tensor([2435, 639, 396, ..., 1847, 1818, 1296])\n", + "deriv_tensor (40, 81), indices tensor([2435, 639, 396, ..., 1847, 1818, 1296])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 2319, 119, ..., 2893, 425, 2426])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 2319, 119, ..., 2893, 425, 2426])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 2319, 119, ..., 2893, 425, 2426])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 2319, 119, ..., 2893, 425, 2426])\n", + "deriv_tensor (40, 81), indices tensor([1005, 260, 2327, ..., 2462, 2651, 2278])\n", + "deriv_tensor (40, 81), indices tensor([1005, 260, 2327, ..., 2462, 2651, 2278])\n", + "deriv_tensor (40, 81), indices tensor([1005, 260, 2327, ..., 2462, 2651, 2278])\n", + "deriv_tensor (40, 81), indices tensor([1005, 260, 2327, ..., 2462, 2651, 2278])\n", + "deriv_tensor (40, 81), indices tensor([3188, 1571, 2216, ..., 343, 582, 541])\n", + "deriv_tensor (40, 81), indices tensor([3188, 1571, 2216, ..., 343, 582, 541])\n", + "deriv_tensor (40, 81), indices tensor([3188, 1571, 2216, ..., 343, 582, 541])\n", + "deriv_tensor (40, 81), indices tensor([3188, 1571, 2216, ..., 343, 582, 541])\n", + "deriv_tensor (40, 81), indices tensor([2218, 1974, 2501, ..., 3072, 1665, 2002])\n", + "deriv_tensor (40, 81), indices tensor([2218, 1974, 2501, ..., 3072, 1665, 2002])\n", + "deriv_tensor (40, 81), indices tensor([2218, 1974, 2501, ..., 3072, 1665, 2002])\n", + "deriv_tensor (40, 81), indices tensor([2218, 1974, 2501, ..., 3072, 1665, 2002])\n", + "deriv_tensor (40, 81), 
indices tensor([ 789, 1758, 3108, ..., 2633, 2572, 113])\n",
+ "... [cell output truncated: the debug line `deriv_tensor (40, 81), indices tensor([...])` repeats once per training iteration, differing only in the freshly sampled index batch] ...\n",
+ "deriv_tensor (40, 81), indices tensor([2777, 138, 3215, ..., 2592, 1489, 
899])\n", + "deriv_tensor (40, 81), indices tensor([2777, 138, 3215, ..., 2592, 1489, 899])\n", + "deriv_tensor (40, 81), indices tensor([2777, 138, 3215, ..., 2592, 1489, 899])\n", + "deriv_tensor (40, 81), indices tensor([ 284, 906, 480, ..., 1426, 1283, 1893])\n", + "deriv_tensor (40, 81), indices tensor([ 284, 906, 480, ..., 1426, 1283, 1893])\n", + "deriv_tensor (40, 81), indices tensor([ 284, 906, 480, ..., 1426, 1283, 1893])\n", + "deriv_tensor (40, 81), indices tensor([ 284, 906, 480, ..., 1426, 1283, 1893])\n", + "deriv_tensor (40, 81), indices tensor([1676, 829, 867, ..., 1024, 2670, 128])\n", + "deriv_tensor (40, 81), indices tensor([1676, 829, 867, ..., 1024, 2670, 128])\n", + "deriv_tensor (40, 81), indices tensor([1676, 829, 867, ..., 1024, 2670, 128])\n", + "deriv_tensor (40, 81), indices tensor([1676, 829, 867, ..., 1024, 2670, 128])\n", + "deriv_tensor (40, 81), indices tensor([1029, 2465, 2847, ..., 1209, 3143, 520])\n", + "deriv_tensor (40, 81), indices tensor([1029, 2465, 2847, ..., 1209, 3143, 520])\n", + "deriv_tensor (40, 81), indices tensor([1029, 2465, 2847, ..., 1209, 3143, 520])\n", + "deriv_tensor (40, 81), indices tensor([1029, 2465, 2847, ..., 1209, 3143, 520])\n", + "deriv_tensor (40, 81), indices tensor([2622, 3184, 50, ..., 378, 7, 1314])\n", + "deriv_tensor (40, 81), indices tensor([2622, 3184, 50, ..., 378, 7, 1314])\n", + "deriv_tensor (40, 81), indices tensor([2622, 3184, 50, ..., 378, 7, 1314])\n", + "deriv_tensor (40, 81), indices tensor([2622, 3184, 50, ..., 378, 7, 1314])\n", + "deriv_tensor (40, 81), indices tensor([1615, 1628, 2032, ..., 1190, 849, 487])\n", + "deriv_tensor (40, 81), indices tensor([1615, 1628, 2032, ..., 1190, 849, 487])\n", + "deriv_tensor (40, 81), indices tensor([1615, 1628, 2032, ..., 1190, 849, 487])\n", + "deriv_tensor (40, 81), indices tensor([1615, 1628, 2032, ..., 1190, 849, 487])\n", + "deriv_tensor (40, 81), indices tensor([358, 894, 310, ..., 753, 997, 622])\n", + "deriv_tensor (40, 81), indices tensor([358, 894, 310, ..., 753, 997, 622])\n", + "deriv_tensor (40, 81), indices tensor([358, 894, 310, ..., 753, 997, 622])\n", + "deriv_tensor (40, 81), indices tensor([358, 894, 310, ..., 753, 997, 622])\n", + "deriv_tensor (40, 81), indices tensor([2415, 988, 2527, ..., 138, 1357, 1674])\n", + "deriv_tensor (40, 81), indices tensor([2415, 988, 2527, ..., 138, 1357, 1674])\n", + "deriv_tensor (40, 81), indices tensor([2415, 988, 2527, ..., 138, 1357, 1674])\n", + "deriv_tensor (40, 81), indices tensor([2415, 988, 2527, ..., 138, 1357, 1674])\n", + "deriv_tensor (40, 81), indices tensor([ 264, 1526, 1741, ..., 406, 571, 2183])\n", + "deriv_tensor (40, 81), indices tensor([ 264, 1526, 1741, ..., 406, 571, 2183])\n", + "deriv_tensor (40, 81), indices tensor([ 264, 1526, 1741, ..., 406, 571, 2183])\n", + "deriv_tensor (40, 81), indices tensor([ 264, 1526, 1741, ..., 406, 571, 2183])\n", + "deriv_tensor (40, 81), indices tensor([1889, 1047, 901, ..., 2788, 2644, 2697])\n", + "deriv_tensor (40, 81), indices tensor([1889, 1047, 901, ..., 2788, 2644, 2697])\n", + "deriv_tensor (40, 81), indices tensor([1889, 1047, 901, ..., 2788, 2644, 2697])\n", + "deriv_tensor (40, 81), indices tensor([1889, 1047, 901, ..., 2788, 2644, 2697])\n", + "deriv_tensor (40, 81), indices tensor([ 593, 1635, 1783, ..., 2887, 1398, 2794])\n", + "deriv_tensor (40, 81), indices tensor([ 593, 1635, 1783, ..., 2887, 1398, 2794])\n", + "deriv_tensor (40, 81), indices tensor([ 593, 1635, 1783, ..., 2887, 1398, 2794])\n", + "deriv_tensor (40, 81), indices 
tensor([ 593, 1635, 1783, ..., 2887, 1398, 2794])\n", + "deriv_tensor (40, 81), indices tensor([1173, 1526, 3077, ..., 2130, 1080, 165])\n", + "deriv_tensor (40, 81), indices tensor([1173, 1526, 3077, ..., 2130, 1080, 165])\n", + "deriv_tensor (40, 81), indices tensor([1173, 1526, 3077, ..., 2130, 1080, 165])\n", + "deriv_tensor (40, 81), indices tensor([1173, 1526, 3077, ..., 2130, 1080, 165])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 176, 1223, ..., 268, 1709, 1220])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 176, 1223, ..., 268, 1709, 1220])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 176, 1223, ..., 268, 1709, 1220])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 176, 1223, ..., 268, 1709, 1220])\n", + "deriv_tensor (40, 81), indices tensor([ 294, 1029, 3040, ..., 430, 558, 1671])\n", + "deriv_tensor (40, 81), indices tensor([ 294, 1029, 3040, ..., 430, 558, 1671])\n", + "deriv_tensor (40, 81), indices tensor([ 294, 1029, 3040, ..., 430, 558, 1671])\n", + "deriv_tensor (40, 81), indices tensor([ 294, 1029, 3040, ..., 430, 558, 1671])\n", + "deriv_tensor (40, 81), indices tensor([ 936, 2366, 206, ..., 48, 1372, 3143])\n", + "deriv_tensor (40, 81), indices tensor([ 936, 2366, 206, ..., 48, 1372, 3143])\n", + "deriv_tensor (40, 81), indices tensor([ 936, 2366, 206, ..., 48, 1372, 3143])\n", + "deriv_tensor (40, 81), indices tensor([ 936, 2366, 206, ..., 48, 1372, 3143])\n", + "deriv_tensor (40, 81), indices tensor([1161, 2261, 52, ..., 3003, 1205, 512])\n", + "deriv_tensor (40, 81), indices tensor([1161, 2261, 52, ..., 3003, 1205, 512])\n", + "deriv_tensor (40, 81), indices tensor([1161, 2261, 52, ..., 3003, 1205, 512])\n", + "deriv_tensor (40, 81), indices tensor([1161, 2261, 52, ..., 3003, 1205, 512])\n", + "deriv_tensor (40, 81), indices tensor([ 405, 1616, 725, ..., 407, 1620, 2705])\n", + "deriv_tensor (40, 81), indices tensor([ 405, 1616, 725, ..., 407, 1620, 2705])\n", + "deriv_tensor (40, 81), indices tensor([ 405, 1616, 725, ..., 407, 1620, 2705])\n", + "deriv_tensor (40, 81), indices tensor([ 405, 1616, 725, ..., 407, 1620, 2705])\n", + "deriv_tensor (40, 81), indices tensor([2031, 387, 2004, ..., 1432, 1955, 1533])\n", + "deriv_tensor (40, 81), indices tensor([2031, 387, 2004, ..., 1432, 1955, 1533])\n", + "deriv_tensor (40, 81), indices tensor([2031, 387, 2004, ..., 1432, 1955, 1533])\n", + "deriv_tensor (40, 81), indices tensor([2031, 387, 2004, ..., 1432, 1955, 1533])\n", + "deriv_tensor (40, 81), indices tensor([1958, 939, 552, ..., 1413, 1341, 2514])\n", + "deriv_tensor (40, 81), indices tensor([1958, 939, 552, ..., 1413, 1341, 2514])\n", + "deriv_tensor (40, 81), indices tensor([1958, 939, 552, ..., 1413, 1341, 2514])\n", + "deriv_tensor (40, 81), indices tensor([1958, 939, 552, ..., 1413, 1341, 2514])\n", + "deriv_tensor (40, 81), indices tensor([1641, 3083, 2899, ..., 2582, 1581, 2078])\n", + "deriv_tensor (40, 81), indices tensor([1641, 3083, 2899, ..., 2582, 1581, 2078])\n", + "deriv_tensor (40, 81), indices tensor([1641, 3083, 2899, ..., 2582, 1581, 2078])\n", + "deriv_tensor (40, 81), indices tensor([1641, 3083, 2899, ..., 2582, 1581, 2078])\n", + "deriv_tensor (40, 81), indices tensor([1079, 547, 219, ..., 640, 1989, 1423])\n", + "deriv_tensor (40, 81), indices tensor([1079, 547, 219, ..., 640, 1989, 1423])\n", + "deriv_tensor (40, 81), indices tensor([1079, 547, 219, ..., 640, 1989, 1423])\n", + "deriv_tensor (40, 81), indices tensor([1079, 547, 219, ..., 640, 1989, 1423])\n", + "deriv_tensor (40, 81), indices tensor([2654, 318, 1251, 
..., 829, 2176, 1387])\n", + "deriv_tensor (40, 81), indices tensor([2654, 318, 1251, ..., 829, 2176, 1387])\n", + "deriv_tensor (40, 81), indices tensor([2654, 318, 1251, ..., 829, 2176, 1387])\n", + "deriv_tensor (40, 81), indices tensor([2654, 318, 1251, ..., 829, 2176, 1387])\n", + "deriv_tensor (40, 81), indices tensor([ 455, 2142, 1620, ..., 61, 3173, 809])\n", + "deriv_tensor (40, 81), indices tensor([ 455, 2142, 1620, ..., 61, 3173, 809])\n", + "deriv_tensor (40, 81), indices tensor([ 455, 2142, 1620, ..., 61, 3173, 809])\n", + "deriv_tensor (40, 81), indices tensor([ 455, 2142, 1620, ..., 61, 3173, 809])\n", + "deriv_tensor (40, 81), indices tensor([2231, 108, 510, ..., 1119, 245, 88])\n", + "deriv_tensor (40, 81), indices tensor([2231, 108, 510, ..., 1119, 245, 88])\n", + "deriv_tensor (40, 81), indices tensor([2231, 108, 510, ..., 1119, 245, 88])\n", + "deriv_tensor (40, 81), indices tensor([2231, 108, 510, ..., 1119, 245, 88])\n", + "deriv_tensor (40, 81), indices tensor([1673, 2080, 1030, ..., 1992, 700, 1976])\n", + "deriv_tensor (40, 81), indices tensor([1673, 2080, 1030, ..., 1992, 700, 1976])\n", + "deriv_tensor (40, 81), indices tensor([1673, 2080, 1030, ..., 1992, 700, 1976])\n", + "deriv_tensor (40, 81), indices tensor([1673, 2080, 1030, ..., 1992, 700, 1976])\n", + "deriv_tensor (40, 81), indices tensor([2079, 2184, 2481, ..., 100, 2440, 393])\n", + "deriv_tensor (40, 81), indices tensor([2079, 2184, 2481, ..., 100, 2440, 393])\n", + "deriv_tensor (40, 81), indices tensor([2079, 2184, 2481, ..., 100, 2440, 393])\n", + "deriv_tensor (40, 81), indices tensor([2079, 2184, 2481, ..., 100, 2440, 393])\n", + "deriv_tensor (40, 81), indices tensor([ 869, 2010, 1344, ..., 2686, 3014, 2847])\n", + "deriv_tensor (40, 81), indices tensor([ 869, 2010, 1344, ..., 2686, 3014, 2847])\n", + "deriv_tensor (40, 81), indices tensor([ 869, 2010, 1344, ..., 2686, 3014, 2847])\n", + "deriv_tensor (40, 81), indices tensor([ 869, 2010, 1344, ..., 2686, 3014, 2847])\n", + "deriv_tensor (40, 81), indices tensor([3069, 3150, 2864, ..., 1715, 3220, 2017])\n", + "deriv_tensor (40, 81), indices tensor([3069, 3150, 2864, ..., 1715, 3220, 2017])\n", + "deriv_tensor (40, 81), indices tensor([3069, 3150, 2864, ..., 1715, 3220, 2017])\n", + "deriv_tensor (40, 81), indices tensor([3069, 3150, 2864, ..., 1715, 3220, 2017])\n", + "deriv_tensor (40, 81), indices tensor([ 559, 1907, 2424, ..., 296, 330, 1899])\n", + "deriv_tensor (40, 81), indices tensor([ 559, 1907, 2424, ..., 296, 330, 1899])\n", + "deriv_tensor (40, 81), indices tensor([ 559, 1907, 2424, ..., 296, 330, 1899])\n", + "deriv_tensor (40, 81), indices tensor([ 559, 1907, 2424, ..., 296, 330, 1899])\n", + "deriv_tensor (40, 81), indices tensor([ 353, 415, 2755, ..., 1276, 2617, 3058])\n", + "deriv_tensor (40, 81), indices tensor([ 353, 415, 2755, ..., 1276, 2617, 3058])\n", + "deriv_tensor (40, 81), indices tensor([ 353, 415, 2755, ..., 1276, 2617, 3058])\n", + "deriv_tensor (40, 81), indices tensor([ 353, 415, 2755, ..., 1276, 2617, 3058])\n", + "deriv_tensor (40, 81), indices tensor([1478, 2753, 1390, ..., 2882, 1732, 552])\n", + "deriv_tensor (40, 81), indices tensor([1478, 2753, 1390, ..., 2882, 1732, 552])\n", + "deriv_tensor (40, 81), indices tensor([1478, 2753, 1390, ..., 2882, 1732, 552])\n", + "deriv_tensor (40, 81), indices tensor([1478, 2753, 1390, ..., 2882, 1732, 552])\n", + "deriv_tensor (40, 81), indices tensor([ 43, 1448, 563, ..., 676, 2347, 1007])\n", + "deriv_tensor (40, 81), indices tensor([ 43, 1448, 563, ..., 676, 2347, 
1007])\n", + "deriv_tensor (40, 81), indices tensor([ 43, 1448, 563, ..., 676, 2347, 1007])\n", + "deriv_tensor (40, 81), indices tensor([ 43, 1448, 563, ..., 676, 2347, 1007])\n", + "deriv_tensor (40, 81), indices tensor([ 227, 1476, 1164, ..., 386, 1444, 1331])\n", + "deriv_tensor (40, 81), indices tensor([ 227, 1476, 1164, ..., 386, 1444, 1331])\n", + "deriv_tensor (40, 81), indices tensor([ 227, 1476, 1164, ..., 386, 1444, 1331])\n", + "deriv_tensor (40, 81), indices tensor([ 227, 1476, 1164, ..., 386, 1444, 1331])\n", + "deriv_tensor (40, 81), indices tensor([ 723, 766, 1222, ..., 2204, 857, 269])\n", + "deriv_tensor (40, 81), indices tensor([ 723, 766, 1222, ..., 2204, 857, 269])\n", + "deriv_tensor (40, 81), indices tensor([ 723, 766, 1222, ..., 2204, 857, 269])\n", + "deriv_tensor (40, 81), indices tensor([ 723, 766, 1222, ..., 2204, 857, 269])\n", + "deriv_tensor (40, 81), indices tensor([1493, 482, 1605, ..., 2676, 1974, 790])\n", + "deriv_tensor (40, 81), indices tensor([1493, 482, 1605, ..., 2676, 1974, 790])\n", + "deriv_tensor (40, 81), indices tensor([1493, 482, 1605, ..., 2676, 1974, 790])\n", + "deriv_tensor (40, 81), indices tensor([1493, 482, 1605, ..., 2676, 1974, 790])\n", + "deriv_tensor (40, 81), indices tensor([2160, 1474, 2332, ..., 3097, 1894, 2154])\n", + "deriv_tensor (40, 81), indices tensor([2160, 1474, 2332, ..., 3097, 1894, 2154])\n", + "deriv_tensor (40, 81), indices tensor([2160, 1474, 2332, ..., 3097, 1894, 2154])\n", + "deriv_tensor (40, 81), indices tensor([2160, 1474, 2332, ..., 3097, 1894, 2154])\n", + "deriv_tensor (40, 81), indices tensor([2536, 2739, 1886, ..., 604, 2701, 945])\n", + "deriv_tensor (40, 81), indices tensor([2536, 2739, 1886, ..., 604, 2701, 945])\n", + "deriv_tensor (40, 81), indices tensor([2536, 2739, 1886, ..., 604, 2701, 945])\n", + "deriv_tensor (40, 81), indices tensor([2536, 2739, 1886, ..., 604, 2701, 945])\n", + "deriv_tensor (40, 81), indices tensor([1445, 2615, 1846, ..., 3175, 1093, 936])\n", + "deriv_tensor (40, 81), indices tensor([1445, 2615, 1846, ..., 3175, 1093, 936])\n", + "deriv_tensor (40, 81), indices tensor([1445, 2615, 1846, ..., 3175, 1093, 936])\n", + "deriv_tensor (40, 81), indices tensor([1445, 2615, 1846, ..., 3175, 1093, 936])\n", + "deriv_tensor (40, 81), indices tensor([1887, 2427, 2673, ..., 3158, 2072, 169])\n", + "deriv_tensor (40, 81), indices tensor([1887, 2427, 2673, ..., 3158, 2072, 169])\n", + "deriv_tensor (40, 81), indices tensor([1887, 2427, 2673, ..., 3158, 2072, 169])\n", + "deriv_tensor (40, 81), indices tensor([1887, 2427, 2673, ..., 3158, 2072, 169])\n", + "deriv_tensor (40, 81), indices tensor([1427, 693, 3026, ..., 1559, 1173, 2259])\n", + "deriv_tensor (40, 81), indices tensor([1427, 693, 3026, ..., 1559, 1173, 2259])\n", + "deriv_tensor (40, 81), indices tensor([1427, 693, 3026, ..., 1559, 1173, 2259])\n", + "deriv_tensor (40, 81), indices tensor([1427, 693, 3026, ..., 1559, 1173, 2259])\n", + "deriv_tensor (40, 81), indices tensor([3198, 2679, 271, ..., 1244, 1199, 1776])\n", + "deriv_tensor (40, 81), indices tensor([3198, 2679, 271, ..., 1244, 1199, 1776])\n", + "deriv_tensor (40, 81), indices tensor([3198, 2679, 271, ..., 1244, 1199, 1776])\n", + "deriv_tensor (40, 81), indices tensor([3198, 2679, 271, ..., 1244, 1199, 1776])\n", + "deriv_tensor (40, 81), indices tensor([2593, 2583, 1323, ..., 2175, 2563, 2475])\n", + "deriv_tensor (40, 81), indices tensor([2593, 2583, 1323, ..., 2175, 2563, 2475])\n", + "deriv_tensor (40, 81), indices tensor([2593, 2583, 1323, ..., 2175, 2563, 
2475])\n", + "deriv_tensor (40, 81), indices tensor([2593, 2583, 1323, ..., 2175, 2563, 2475])\n", + "deriv_tensor (40, 81), indices tensor([1401, 77, 2417, ..., 1941, 1613, 2787])\n", + "deriv_tensor (40, 81), indices tensor([1401, 77, 2417, ..., 1941, 1613, 2787])\n", + "deriv_tensor (40, 81), indices tensor([1401, 77, 2417, ..., 1941, 1613, 2787])\n", + "deriv_tensor (40, 81), indices tensor([1401, 77, 2417, ..., 1941, 1613, 2787])\n", + "deriv_tensor (40, 81), indices tensor([ 411, 3059, 962, ..., 298, 2031, 882])\n", + "deriv_tensor (40, 81), indices tensor([ 411, 3059, 962, ..., 298, 2031, 882])\n", + "deriv_tensor (40, 81), indices tensor([ 411, 3059, 962, ..., 298, 2031, 882])\n", + "deriv_tensor (40, 81), indices tensor([ 411, 3059, 962, ..., 298, 2031, 882])\n", + "deriv_tensor (40, 81), indices tensor([ 528, 2514, 1605, ..., 1550, 95, 1032])\n", + "deriv_tensor (40, 81), indices tensor([ 528, 2514, 1605, ..., 1550, 95, 1032])\n", + "deriv_tensor (40, 81), indices tensor([ 528, 2514, 1605, ..., 1550, 95, 1032])\n", + "deriv_tensor (40, 81), indices tensor([ 528, 2514, 1605, ..., 1550, 95, 1032])\n", + "deriv_tensor (40, 81), indices tensor([1224, 2632, 2310, ..., 970, 2105, 2519])\n", + "deriv_tensor (40, 81), indices tensor([1224, 2632, 2310, ..., 970, 2105, 2519])\n", + "deriv_tensor (40, 81), indices tensor([1224, 2632, 2310, ..., 970, 2105, 2519])\n", + "deriv_tensor (40, 81), indices tensor([1224, 2632, 2310, ..., 970, 2105, 2519])\n", + "deriv_tensor (40, 81), indices tensor([3088, 3121, 814, ..., 2271, 324, 2160])\n", + "deriv_tensor (40, 81), indices tensor([3088, 3121, 814, ..., 2271, 324, 2160])\n", + "deriv_tensor (40, 81), indices tensor([3088, 3121, 814, ..., 2271, 324, 2160])\n", + "deriv_tensor (40, 81), indices tensor([3088, 3121, 814, ..., 2271, 324, 2160])\n", + "deriv_tensor (40, 81), indices tensor([1954, 1921, 1707, ..., 2456, 2832, 3237])\n", + "deriv_tensor (40, 81), indices tensor([1954, 1921, 1707, ..., 2456, 2832, 3237])\n", + "deriv_tensor (40, 81), indices tensor([1954, 1921, 1707, ..., 2456, 2832, 3237])\n", + "deriv_tensor (40, 81), indices tensor([1954, 1921, 1707, ..., 2456, 2832, 3237])\n", + "deriv_tensor (40, 81), indices tensor([2373, 403, 558, ..., 2627, 449, 21])\n", + "deriv_tensor (40, 81), indices tensor([2373, 403, 558, ..., 2627, 449, 21])\n", + "deriv_tensor (40, 81), indices tensor([2373, 403, 558, ..., 2627, 449, 21])\n", + "deriv_tensor (40, 81), indices tensor([2373, 403, 558, ..., 2627, 449, 21])\n", + "deriv_tensor (40, 81), indices tensor([3026, 1374, 2061, ..., 2638, 2625, 1386])\n", + "deriv_tensor (40, 81), indices tensor([3026, 1374, 2061, ..., 2638, 2625, 1386])\n", + "deriv_tensor (40, 81), indices tensor([3026, 1374, 2061, ..., 2638, 2625, 1386])\n", + "deriv_tensor (40, 81), indices tensor([3026, 1374, 2061, ..., 2638, 2625, 1386])\n", + "deriv_tensor (40, 81), indices tensor([1727, 1046, 832, ..., 2451, 562, 2147])\n", + "deriv_tensor (40, 81), indices tensor([1727, 1046, 832, ..., 2451, 562, 2147])\n", + "deriv_tensor (40, 81), indices tensor([1727, 1046, 832, ..., 2451, 562, 2147])\n", + "deriv_tensor (40, 81), indices tensor([1727, 1046, 832, ..., 2451, 562, 2147])\n", + "deriv_tensor (40, 81), indices tensor([1417, 2106, 397, ..., 1076, 1580, 86])\n", + "deriv_tensor (40, 81), indices tensor([1417, 2106, 397, ..., 1076, 1580, 86])\n", + "deriv_tensor (40, 81), indices tensor([1417, 2106, 397, ..., 1076, 1580, 86])\n", + "deriv_tensor (40, 81), indices tensor([1417, 2106, 397, ..., 1076, 1580, 86])\n", + "deriv_tensor 
(40, 81), indices tensor([1767, 317, 3141, ..., 1074, 2295, 2632])\n", + "deriv_tensor (40, 81), indices tensor([1767, 317, 3141, ..., 1074, 2295, 2632])\n", + "deriv_tensor (40, 81), indices tensor([1767, 317, 3141, ..., 1074, 2295, 2632])\n", + "deriv_tensor (40, 81), indices tensor([1767, 317, 3141, ..., 1074, 2295, 2632])\n", + "deriv_tensor (40, 81), indices tensor([1825, 2379, 3087, ..., 2209, 2713, 1744])\n", + "deriv_tensor (40, 81), indices tensor([1825, 2379, 3087, ..., 2209, 2713, 1744])\n", + "deriv_tensor (40, 81), indices tensor([1825, 2379, 3087, ..., 2209, 2713, 1744])\n", + "deriv_tensor (40, 81), indices tensor([1825, 2379, 3087, ..., 2209, 2713, 1744])\n", + "deriv_tensor (40, 81), indices tensor([ 996, 1059, 1905, ..., 2399, 1877, 3033])\n", + "deriv_tensor (40, 81), indices tensor([ 996, 1059, 1905, ..., 2399, 1877, 3033])\n", + "deriv_tensor (40, 81), indices tensor([ 996, 1059, 1905, ..., 2399, 1877, 3033])\n", + "deriv_tensor (40, 81), indices tensor([ 996, 1059, 1905, ..., 2399, 1877, 3033])\n", + "deriv_tensor (40, 81), indices tensor([2324, 732, 606, ..., 2464, 2704, 796])\n", + "deriv_tensor (40, 81), indices tensor([2324, 732, 606, ..., 2464, 2704, 796])\n", + "deriv_tensor (40, 81), indices tensor([2324, 732, 606, ..., 2464, 2704, 796])\n", + "deriv_tensor (40, 81), indices tensor([2324, 732, 606, ..., 2464, 2704, 796])\n", + "deriv_tensor (40, 81), indices tensor([1552, 1741, 593, ..., 1307, 2366, 311])\n", + "deriv_tensor (40, 81), indices tensor([1552, 1741, 593, ..., 1307, 2366, 311])\n", + "deriv_tensor (40, 81), indices tensor([1552, 1741, 593, ..., 1307, 2366, 311])\n", + "deriv_tensor (40, 81), indices tensor([1552, 1741, 593, ..., 1307, 2366, 311])\n", + "deriv_tensor (40, 81), indices tensor([ 5, 1614, 2580, ..., 764, 1106, 912])\n", + "deriv_tensor (40, 81), indices tensor([ 5, 1614, 2580, ..., 764, 1106, 912])\n", + "deriv_tensor (40, 81), indices tensor([ 5, 1614, 2580, ..., 764, 1106, 912])\n", + "deriv_tensor (40, 81), indices tensor([ 5, 1614, 2580, ..., 764, 1106, 912])\n", + "deriv_tensor (40, 81), indices tensor([2748, 1241, 1180, ..., 2573, 1510, 1410])\n", + "deriv_tensor (40, 81), indices tensor([2748, 1241, 1180, ..., 2573, 1510, 1410])\n", + "deriv_tensor (40, 81), indices tensor([2748, 1241, 1180, ..., 2573, 1510, 1410])\n", + "deriv_tensor (40, 81), indices tensor([2748, 1241, 1180, ..., 2573, 1510, 1410])\n", + "deriv_tensor (40, 81), indices tensor([1573, 210, 107, ..., 1552, 618, 1698])\n", + "deriv_tensor (40, 81), indices tensor([1573, 210, 107, ..., 1552, 618, 1698])\n", + "deriv_tensor (40, 81), indices tensor([1573, 210, 107, ..., 1552, 618, 1698])\n", + "deriv_tensor (40, 81), indices tensor([1573, 210, 107, ..., 1552, 618, 1698])\n", + "deriv_tensor (40, 81), indices tensor([ 354, 2904, 2620, ..., 497, 1720, 1985])\n", + "deriv_tensor (40, 81), indices tensor([ 354, 2904, 2620, ..., 497, 1720, 1985])\n", + "deriv_tensor (40, 81), indices tensor([ 354, 2904, 2620, ..., 497, 1720, 1985])\n", + "deriv_tensor (40, 81), indices tensor([ 354, 2904, 2620, ..., 497, 1720, 1985])\n", + "deriv_tensor (40, 81), indices tensor([2265, 934, 1568, ..., 2263, 1673, 2741])\n", + "deriv_tensor (40, 81), indices tensor([2265, 934, 1568, ..., 2263, 1673, 2741])\n", + "deriv_tensor (40, 81), indices tensor([2265, 934, 1568, ..., 2263, 1673, 2741])\n", + "deriv_tensor (40, 81), indices tensor([2265, 934, 1568, ..., 2263, 1673, 2741])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 523, 1119, ..., 912, 1641, 323])\n", + "deriv_tensor (40, 81), 
indices tensor([ 825, 523, 1119, ..., 912, 1641, 323])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 523, 1119, ..., 912, 1641, 323])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 523, 1119, ..., 912, 1641, 323])\n", + "deriv_tensor (40, 81), indices tensor([ 669, 2884, 2505, ..., 148, 2942, 2632])\n", + "deriv_tensor (40, 81), indices tensor([ 669, 2884, 2505, ..., 148, 2942, 2632])\n", + "deriv_tensor (40, 81), indices tensor([ 669, 2884, 2505, ..., 148, 2942, 2632])\n", + "deriv_tensor (40, 81), indices tensor([ 669, 2884, 2505, ..., 148, 2942, 2632])\n", + "deriv_tensor (40, 81), indices tensor([2352, 1665, 2425, ..., 1402, 3152, 2476])\n", + "deriv_tensor (40, 81), indices tensor([2352, 1665, 2425, ..., 1402, 3152, 2476])\n", + "deriv_tensor (40, 81), indices tensor([2352, 1665, 2425, ..., 1402, 3152, 2476])\n", + "deriv_tensor (40, 81), indices tensor([2352, 1665, 2425, ..., 1402, 3152, 2476])\n", + "deriv_tensor (40, 81), indices tensor([ 721, 326, 2287, ..., 1459, 2054, 2523])\n", + "deriv_tensor (40, 81), indices tensor([ 721, 326, 2287, ..., 1459, 2054, 2523])\n", + "deriv_tensor (40, 81), indices tensor([ 721, 326, 2287, ..., 1459, 2054, 2523])\n", + "deriv_tensor (40, 81), indices tensor([ 721, 326, 2287, ..., 1459, 2054, 2523])\n", + "deriv_tensor (40, 81), indices tensor([1441, 1144, 1348, ..., 341, 1283, 1506])\n", + "deriv_tensor (40, 81), indices tensor([1441, 1144, 1348, ..., 341, 1283, 1506])\n", + "deriv_tensor (40, 81), indices tensor([1441, 1144, 1348, ..., 341, 1283, 1506])\n", + "deriv_tensor (40, 81), indices tensor([1441, 1144, 1348, ..., 341, 1283, 1506])\n", + "deriv_tensor (40, 81), indices tensor([1257, 1387, 2787, ..., 2841, 1670, 2819])\n", + "deriv_tensor (40, 81), indices tensor([1257, 1387, 2787, ..., 2841, 1670, 2819])\n", + "deriv_tensor (40, 81), indices tensor([1257, 1387, 2787, ..., 2841, 1670, 2819])\n", + "deriv_tensor (40, 81), indices tensor([1257, 1387, 2787, ..., 2841, 1670, 2819])\n", + "deriv_tensor (40, 81), indices tensor([1689, 320, 2097, ..., 3139, 3104, 2937])\n", + "deriv_tensor (40, 81), indices tensor([1689, 320, 2097, ..., 3139, 3104, 2937])\n", + "deriv_tensor (40, 81), indices tensor([1689, 320, 2097, ..., 3139, 3104, 2937])\n", + "deriv_tensor (40, 81), indices tensor([1689, 320, 2097, ..., 3139, 3104, 2937])\n", + "deriv_tensor (40, 81), indices tensor([1843, 1358, 1408, ..., 327, 3100, 2161])\n", + "deriv_tensor (40, 81), indices tensor([1843, 1358, 1408, ..., 327, 3100, 2161])\n", + "deriv_tensor (40, 81), indices tensor([1843, 1358, 1408, ..., 327, 3100, 2161])\n", + "deriv_tensor (40, 81), indices tensor([1843, 1358, 1408, ..., 327, 3100, 2161])\n", + "deriv_tensor (40, 81), indices tensor([2268, 2658, 1582, ..., 2194, 181, 2535])\n", + "deriv_tensor (40, 81), indices tensor([2268, 2658, 1582, ..., 2194, 181, 2535])\n", + "deriv_tensor (40, 81), indices tensor([2268, 2658, 1582, ..., 2194, 181, 2535])\n", + "deriv_tensor (40, 81), indices tensor([2268, 2658, 1582, ..., 2194, 181, 2535])\n", + "deriv_tensor (40, 81), indices tensor([1357, 2912, 160, ..., 2729, 1110, 387])\n", + "deriv_tensor (40, 81), indices tensor([1357, 2912, 160, ..., 2729, 1110, 387])\n", + "deriv_tensor (40, 81), indices tensor([1357, 2912, 160, ..., 2729, 1110, 387])\n", + "deriv_tensor (40, 81), indices tensor([1357, 2912, 160, ..., 2729, 1110, 387])\n", + "deriv_tensor (40, 81), indices tensor([ 444, 2893, 1806, ..., 1547, 1979, 169])\n", + "deriv_tensor (40, 81), indices tensor([ 444, 2893, 1806, ..., 1547, 1979, 169])\n", + "deriv_tensor 
(40, 81), indices tensor([ 444, 2893, 1806, ..., 1547, 1979, 169])\n", + "deriv_tensor (40, 81), indices tensor([ 444, 2893, 1806, ..., 1547, 1979, 169])\n", + "deriv_tensor (40, 81), indices tensor([1195, 2716, 694, ..., 2733, 599, 2764])\n", + "deriv_tensor (40, 81), indices tensor([1195, 2716, 694, ..., 2733, 599, 2764])\n", + "deriv_tensor (40, 81), indices tensor([1195, 2716, 694, ..., 2733, 599, 2764])\n", + "deriv_tensor (40, 81), indices tensor([1195, 2716, 694, ..., 2733, 599, 2764])\n", + "deriv_tensor (40, 81), indices tensor([ 287, 1673, 1590, ..., 138, 433, 2488])\n", + "deriv_tensor (40, 81), indices tensor([ 287, 1673, 1590, ..., 138, 433, 2488])\n", + "deriv_tensor (40, 81), indices tensor([ 287, 1673, 1590, ..., 138, 433, 2488])\n", + "deriv_tensor (40, 81), indices tensor([ 287, 1673, 1590, ..., 138, 433, 2488])\n", + "deriv_tensor (40, 81), indices tensor([1538, 3148, 815, ..., 2151, 2611, 1809])\n", + "deriv_tensor (40, 81), indices tensor([1538, 3148, 815, ..., 2151, 2611, 1809])\n", + "deriv_tensor (40, 81), indices tensor([1538, 3148, 815, ..., 2151, 2611, 1809])\n", + "deriv_tensor (40, 81), indices tensor([1538, 3148, 815, ..., 2151, 2611, 1809])\n", + "deriv_tensor (40, 81), indices tensor([ 507, 1517, 469, ..., 2931, 1, 2880])\n", + "deriv_tensor (40, 81), indices tensor([ 507, 1517, 469, ..., 2931, 1, 2880])\n", + "deriv_tensor (40, 81), indices tensor([ 507, 1517, 469, ..., 2931, 1, 2880])\n", + "deriv_tensor (40, 81), indices tensor([ 507, 1517, 469, ..., 2931, 1, 2880])\n", + "deriv_tensor (40, 81), indices tensor([1609, 2796, 1450, ..., 304, 459, 3006])\n", + "deriv_tensor (40, 81), indices tensor([1609, 2796, 1450, ..., 304, 459, 3006])\n", + "deriv_tensor (40, 81), indices tensor([1609, 2796, 1450, ..., 304, 459, 3006])\n", + "deriv_tensor (40, 81), indices tensor([1609, 2796, 1450, ..., 304, 459, 3006])\n", + "deriv_tensor (40, 81), indices tensor([1645, 2354, 86, ..., 2113, 2711, 2695])\n", + "deriv_tensor (40, 81), indices tensor([1645, 2354, 86, ..., 2113, 2711, 2695])\n", + "deriv_tensor (40, 81), indices tensor([1645, 2354, 86, ..., 2113, 2711, 2695])\n", + "deriv_tensor (40, 81), indices tensor([1645, 2354, 86, ..., 2113, 2711, 2695])\n", + "deriv_tensor (40, 81), indices tensor([1211, 279, 1302, ..., 3233, 21, 1460])\n", + "deriv_tensor (40, 81), indices tensor([1211, 279, 1302, ..., 3233, 21, 1460])\n", + "deriv_tensor (40, 81), indices tensor([1211, 279, 1302, ..., 3233, 21, 1460])\n", + "deriv_tensor (40, 81), indices tensor([1211, 279, 1302, ..., 3233, 21, 1460])\n", + "deriv_tensor (40, 81), indices tensor([1292, 537, 2919, ..., 54, 156, 2242])\n", + "deriv_tensor (40, 81), indices tensor([1292, 537, 2919, ..., 54, 156, 2242])\n", + "deriv_tensor (40, 81), indices tensor([1292, 537, 2919, ..., 54, 156, 2242])\n", + "deriv_tensor (40, 81), indices tensor([1292, 537, 2919, ..., 54, 156, 2242])\n", + "deriv_tensor (40, 81), indices tensor([2499, 559, 94, ..., 477, 2221, 3100])\n", + "deriv_tensor (40, 81), indices tensor([2499, 559, 94, ..., 477, 2221, 3100])\n", + "deriv_tensor (40, 81), indices tensor([2499, 559, 94, ..., 477, 2221, 3100])\n", + "deriv_tensor (40, 81), indices tensor([2499, 559, 94, ..., 477, 2221, 3100])\n", + "deriv_tensor (40, 81), indices tensor([2761, 2709, 2485, ..., 218, 2002, 2839])\n", + "deriv_tensor (40, 81), indices tensor([2761, 2709, 2485, ..., 218, 2002, 2839])\n", + "deriv_tensor (40, 81), indices tensor([2761, 2709, 2485, ..., 218, 2002, 2839])\n", + "deriv_tensor (40, 81), indices tensor([2761, 2709, 2485, 
..., 218, 2002, 2839])\n", + "deriv_tensor (40, 81), indices tensor([3229, 2365, 2622, ..., 1986, 2192, 109])\n", + "deriv_tensor (40, 81), indices tensor([3229, 2365, 2622, ..., 1986, 2192, 109])\n", + "deriv_tensor (40, 81), indices tensor([3229, 2365, 2622, ..., 1986, 2192, 109])\n", + "deriv_tensor (40, 81), indices tensor([3229, 2365, 2622, ..., 1986, 2192, 109])\n", + "deriv_tensor (40, 81), indices tensor([ 302, 906, 1568, ..., 754, 2262, 1303])\n", + "deriv_tensor (40, 81), indices tensor([ 302, 906, 1568, ..., 754, 2262, 1303])\n", + "deriv_tensor (40, 81), indices tensor([ 302, 906, 1568, ..., 754, 2262, 1303])\n", + "deriv_tensor (40, 81), indices tensor([ 302, 906, 1568, ..., 754, 2262, 1303])\n", + "deriv_tensor (40, 81), indices tensor([2554, 2706, 1284, ..., 2478, 545, 2805])\n", + "deriv_tensor (40, 81), indices tensor([2554, 2706, 1284, ..., 2478, 545, 2805])\n", + "deriv_tensor (40, 81), indices tensor([2554, 2706, 1284, ..., 2478, 545, 2805])\n", + "deriv_tensor (40, 81), indices tensor([2554, 2706, 1284, ..., 2478, 545, 2805])\n", + "deriv_tensor (40, 81), indices tensor([2270, 2749, 1666, ..., 2621, 1954, 354])\n", + "deriv_tensor (40, 81), indices tensor([2270, 2749, 1666, ..., 2621, 1954, 354])\n", + "deriv_tensor (40, 81), indices tensor([2270, 2749, 1666, ..., 2621, 1954, 354])\n", + "deriv_tensor (40, 81), indices tensor([2270, 2749, 1666, ..., 2621, 1954, 354])\n", + "deriv_tensor (40, 81), indices tensor([1994, 966, 2912, ..., 2151, 2353, 721])\n", + "deriv_tensor (40, 81), indices tensor([1994, 966, 2912, ..., 2151, 2353, 721])\n", + "deriv_tensor (40, 81), indices tensor([1994, 966, 2912, ..., 2151, 2353, 721])\n", + "deriv_tensor (40, 81), indices tensor([1994, 966, 2912, ..., 2151, 2353, 721])\n", + "deriv_tensor (40, 81), indices tensor([1823, 1833, 547, ..., 343, 706, 3238])\n", + "deriv_tensor (40, 81), indices tensor([1823, 1833, 547, ..., 343, 706, 3238])\n", + "deriv_tensor (40, 81), indices tensor([1823, 1833, 547, ..., 343, 706, 3238])\n", + "deriv_tensor (40, 81), indices tensor([1823, 1833, 547, ..., 343, 706, 3238])\n", + "deriv_tensor (40, 81), indices tensor([ 848, 1122, 2070, ..., 2332, 22, 2282])\n", + "deriv_tensor (40, 81), indices tensor([ 848, 1122, 2070, ..., 2332, 22, 2282])\n", + "deriv_tensor (40, 81), indices tensor([ 848, 1122, 2070, ..., 2332, 22, 2282])\n", + "deriv_tensor (40, 81), indices tensor([ 848, 1122, 2070, ..., 2332, 22, 2282])\n", + "deriv_tensor (40, 81), indices tensor([2239, 821, 1417, ..., 401, 481, 2229])\n", + "deriv_tensor (40, 81), indices tensor([2239, 821, 1417, ..., 401, 481, 2229])\n", + "deriv_tensor (40, 81), indices tensor([2239, 821, 1417, ..., 401, 481, 2229])\n", + "deriv_tensor (40, 81), indices tensor([2239, 821, 1417, ..., 401, 481, 2229])\n", + "deriv_tensor (40, 81), indices tensor([1382, 2864, 2525, ..., 2201, 632, 1643])\n", + "deriv_tensor (40, 81), indices tensor([1382, 2864, 2525, ..., 2201, 632, 1643])\n", + "deriv_tensor (40, 81), indices tensor([1382, 2864, 2525, ..., 2201, 632, 1643])\n", + "deriv_tensor (40, 81), indices tensor([1382, 2864, 2525, ..., 2201, 632, 1643])\n", + "deriv_tensor (40, 81), indices tensor([ 555, 984, 3179, ..., 678, 597, 1231])\n", + "deriv_tensor (40, 81), indices tensor([ 555, 984, 3179, ..., 678, 597, 1231])\n", + "deriv_tensor (40, 81), indices tensor([ 555, 984, 3179, ..., 678, 597, 1231])\n", + "deriv_tensor (40, 81), indices tensor([ 555, 984, 3179, ..., 678, 597, 1231])\n", + "deriv_tensor (40, 81), indices tensor([2000, 2040, 316, ..., 2331, 2145, 
1536])\n", + "deriv_tensor (40, 81), indices tensor([2000, 2040, 316, ..., 2331, 2145, 1536])\n", + "deriv_tensor (40, 81), indices tensor([2000, 2040, 316, ..., 2331, 2145, 1536])\n", + "deriv_tensor (40, 81), indices tensor([2000, 2040, 316, ..., 2331, 2145, 1536])\n", + "deriv_tensor (40, 81), indices tensor([ 223, 2386, 1846, ..., 3131, 871, 370])\n", + "deriv_tensor (40, 81), indices tensor([ 223, 2386, 1846, ..., 3131, 871, 370])\n", + "deriv_tensor (40, 81), indices tensor([ 223, 2386, 1846, ..., 3131, 871, 370])\n", + "deriv_tensor (40, 81), indices tensor([ 223, 2386, 1846, ..., 3131, 871, 370])\n", + "deriv_tensor (40, 81), indices tensor([ 517, 646, 2833, ..., 2374, 139, 565])\n", + "deriv_tensor (40, 81), indices tensor([ 517, 646, 2833, ..., 2374, 139, 565])\n", + "deriv_tensor (40, 81), indices tensor([ 517, 646, 2833, ..., 2374, 139, 565])\n", + "deriv_tensor (40, 81), indices tensor([ 517, 646, 2833, ..., 2374, 139, 565])\n", + "deriv_tensor (40, 81), indices tensor([1559, 2974, 256, ..., 2648, 940, 495])\n", + "deriv_tensor (40, 81), indices tensor([1559, 2974, 256, ..., 2648, 940, 495])\n", + "deriv_tensor (40, 81), indices tensor([1559, 2974, 256, ..., 2648, 940, 495])\n", + "deriv_tensor (40, 81), indices tensor([1559, 2974, 256, ..., 2648, 940, 495])\n", + "deriv_tensor (40, 81), indices tensor([ 96, 3083, 464, ..., 148, 93, 2888])\n", + "deriv_tensor (40, 81), indices tensor([ 96, 3083, 464, ..., 148, 93, 2888])\n", + "deriv_tensor (40, 81), indices tensor([ 96, 3083, 464, ..., 148, 93, 2888])\n", + "deriv_tensor (40, 81), indices tensor([ 96, 3083, 464, ..., 148, 93, 2888])\n", + "deriv_tensor (40, 81), indices tensor([1388, 1900, 1500, ..., 3062, 739, 536])\n", + "deriv_tensor (40, 81), indices tensor([1388, 1900, 1500, ..., 3062, 739, 536])\n", + "deriv_tensor (40, 81), indices tensor([1388, 1900, 1500, ..., 3062, 739, 536])\n", + "deriv_tensor (40, 81), indices tensor([1388, 1900, 1500, ..., 3062, 739, 536])\n", + "deriv_tensor (40, 81), indices tensor([ 576, 1997, 920, ..., 0, 1412, 102])\n", + "deriv_tensor (40, 81), indices tensor([ 576, 1997, 920, ..., 0, 1412, 102])\n", + "deriv_tensor (40, 81), indices tensor([ 576, 1997, 920, ..., 0, 1412, 102])\n", + "deriv_tensor (40, 81), indices tensor([ 576, 1997, 920, ..., 0, 1412, 102])\n", + "deriv_tensor (40, 81), indices tensor([2899, 391, 2443, ..., 1455, 1519, 515])\n", + "deriv_tensor (40, 81), indices tensor([2899, 391, 2443, ..., 1455, 1519, 515])\n", + "deriv_tensor (40, 81), indices tensor([2899, 391, 2443, ..., 1455, 1519, 515])\n", + "deriv_tensor (40, 81), indices tensor([2899, 391, 2443, ..., 1455, 1519, 515])\n", + "deriv_tensor (40, 81), indices tensor([2997, 120, 2433, ..., 2995, 340, 1274])\n", + "deriv_tensor (40, 81), indices tensor([2997, 120, 2433, ..., 2995, 340, 1274])\n", + "deriv_tensor (40, 81), indices tensor([2997, 120, 2433, ..., 2995, 340, 1274])\n", + "deriv_tensor (40, 81), indices tensor([2997, 120, 2433, ..., 2995, 340, 1274])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 2690, 1342, ..., 833, 2903, 2174])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 2690, 1342, ..., 833, 2903, 2174])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 2690, 1342, ..., 833, 2903, 2174])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 2690, 1342, ..., 833, 2903, 2174])\n", + "deriv_tensor (40, 81), indices tensor([ 293, 2383, 329, ..., 1452, 993, 2896])\n", + "deriv_tensor (40, 81), indices tensor([ 293, 2383, 329, ..., 1452, 993, 2896])\n", + "deriv_tensor (40, 81), indices tensor([ 
293, 2383, 329, ..., 1452, 993, 2896])\n", + "deriv_tensor (40, 81), indices tensor([ 293, 2383, 329, ..., 1452, 993, 2896])\n", + "deriv_tensor (40, 81), indices tensor([381, 76, 94, ..., 843, 3, 630])\n", + "deriv_tensor (40, 81), indices tensor([381, 76, 94, ..., 843, 3, 630])\n", + "deriv_tensor (40, 81), indices tensor([381, 76, 94, ..., 843, 3, 630])\n", + "deriv_tensor (40, 81), indices tensor([381, 76, 94, ..., 843, 3, 630])\n", + "deriv_tensor (40, 81), indices tensor([2152, 1820, 2203, ..., 2918, 1752, 1049])\n", + "deriv_tensor (40, 81), indices tensor([2152, 1820, 2203, ..., 2918, 1752, 1049])\n", + "deriv_tensor (40, 81), indices tensor([2152, 1820, 2203, ..., 2918, 1752, 1049])\n", + "deriv_tensor (40, 81), indices tensor([2152, 1820, 2203, ..., 2918, 1752, 1049])\n", + "deriv_tensor (40, 81), indices tensor([ 161, 347, 45, ..., 3081, 1411, 301])\n", + "deriv_tensor (40, 81), indices tensor([ 161, 347, 45, ..., 3081, 1411, 301])\n", + "deriv_tensor (40, 81), indices tensor([ 161, 347, 45, ..., 3081, 1411, 301])\n", + "deriv_tensor (40, 81), indices tensor([ 161, 347, 45, ..., 3081, 1411, 301])\n", + "deriv_tensor (40, 81), indices tensor([ 440, 1633, 915, ..., 22, 650, 2086])\n", + "deriv_tensor (40, 81), indices tensor([ 440, 1633, 915, ..., 22, 650, 2086])\n", + "deriv_tensor (40, 81), indices tensor([ 440, 1633, 915, ..., 22, 650, 2086])\n", + "deriv_tensor (40, 81), indices tensor([ 440, 1633, 915, ..., 22, 650, 2086])\n", + "deriv_tensor (40, 81), indices tensor([2616, 2383, 1567, ..., 1375, 2610, 2098])\n", + "deriv_tensor (40, 81), indices tensor([2616, 2383, 1567, ..., 1375, 2610, 2098])\n", + "deriv_tensor (40, 81), indices tensor([2616, 2383, 1567, ..., 1375, 2610, 2098])\n", + "deriv_tensor (40, 81), indices tensor([2616, 2383, 1567, ..., 1375, 2610, 2098])\n", + "deriv_tensor (40, 81), indices tensor([1510, 843, 3204, ..., 1873, 495, 1330])\n", + "deriv_tensor (40, 81), indices tensor([1510, 843, 3204, ..., 1873, 495, 1330])\n", + "deriv_tensor (40, 81), indices tensor([1510, 843, 3204, ..., 1873, 495, 1330])\n", + "deriv_tensor (40, 81), indices tensor([1510, 843, 3204, ..., 1873, 495, 1330])\n", + "deriv_tensor (40, 81), indices tensor([ 539, 1567, 1047, ..., 517, 1921, 1664])\n", + "deriv_tensor (40, 81), indices tensor([ 539, 1567, 1047, ..., 517, 1921, 1664])\n", + "deriv_tensor (40, 81), indices tensor([ 539, 1567, 1047, ..., 517, 1921, 1664])\n", + "deriv_tensor (40, 81), indices tensor([ 539, 1567, 1047, ..., 517, 1921, 1664])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 856, 2320, ..., 278, 2225, 2077])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 856, 2320, ..., 278, 2225, 2077])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 856, 2320, ..., 278, 2225, 2077])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 856, 2320, ..., 278, 2225, 2077])\n", + "deriv_tensor (40, 81), indices tensor([1143, 3020, 1592, ..., 2128, 684, 2355])\n", + "deriv_tensor (40, 81), indices tensor([1143, 3020, 1592, ..., 2128, 684, 2355])\n", + "deriv_tensor (40, 81), indices tensor([1143, 3020, 1592, ..., 2128, 684, 2355])\n", + "deriv_tensor (40, 81), indices tensor([1143, 3020, 1592, ..., 2128, 684, 2355])\n", + "deriv_tensor (40, 81), indices tensor([ 412, 2474, 395, ..., 631, 735, 2485])\n", + "deriv_tensor (40, 81), indices tensor([ 412, 2474, 395, ..., 631, 735, 2485])\n", + "deriv_tensor (40, 81), indices tensor([ 412, 2474, 395, ..., 631, 735, 2485])\n", + "deriv_tensor (40, 81), indices tensor([ 412, 2474, 395, ..., 631, 735, 2485])\n", + 
"deriv_tensor (40, 81), indices tensor([ 405, 1840, 3228, ..., 1774, 54, 837])\n", + "deriv_tensor (40, 81), indices tensor([ 405, 1840, 3228, ..., 1774, 54, 837])\n", + "deriv_tensor (40, 81), indices tensor([ 405, 1840, 3228, ..., 1774, 54, 837])\n", + "deriv_tensor (40, 81), indices tensor([ 405, 1840, 3228, ..., 1774, 54, 837])\n", + "deriv_tensor (40, 81), indices tensor([1983, 790, 295, ..., 2363, 1783, 720])\n", + "deriv_tensor (40, 81), indices tensor([1983, 790, 295, ..., 2363, 1783, 720])\n", + "deriv_tensor (40, 81), indices tensor([1983, 790, 295, ..., 2363, 1783, 720])\n", + "deriv_tensor (40, 81), indices tensor([1983, 790, 295, ..., 2363, 1783, 720])\n", + "deriv_tensor (40, 81), indices tensor([2540, 1382, 1447, ..., 2690, 1931, 1565])\n", + "deriv_tensor (40, 81), indices tensor([2540, 1382, 1447, ..., 2690, 1931, 1565])\n", + "deriv_tensor (40, 81), indices tensor([2540, 1382, 1447, ..., 2690, 1931, 1565])\n", + "deriv_tensor (40, 81), indices tensor([2540, 1382, 1447, ..., 2690, 1931, 1565])\n", + "deriv_tensor (40, 81), indices tensor([2077, 554, 3239, ..., 517, 134, 2113])\n", + "deriv_tensor (40, 81), indices tensor([2077, 554, 3239, ..., 517, 134, 2113])\n", + "deriv_tensor (40, 81), indices tensor([2077, 554, 3239, ..., 517, 134, 2113])\n", + "deriv_tensor (40, 81), indices tensor([2077, 554, 3239, ..., 517, 134, 2113])\n", + "deriv_tensor (40, 81), indices tensor([1129, 522, 2144, ..., 2998, 1401, 408])\n", + "deriv_tensor (40, 81), indices tensor([1129, 522, 2144, ..., 2998, 1401, 408])\n", + "deriv_tensor (40, 81), indices tensor([1129, 522, 2144, ..., 2998, 1401, 408])\n", + "deriv_tensor (40, 81), indices tensor([1129, 522, 2144, ..., 2998, 1401, 408])\n", + "deriv_tensor (40, 81), indices tensor([2241, 914, 1261, ..., 980, 2274, 507])\n", + "deriv_tensor (40, 81), indices tensor([2241, 914, 1261, ..., 980, 2274, 507])\n", + "deriv_tensor (40, 81), indices tensor([2241, 914, 1261, ..., 980, 2274, 507])\n", + "deriv_tensor (40, 81), indices tensor([2241, 914, 1261, ..., 980, 2274, 507])\n", + "deriv_tensor (40, 81), indices tensor([2289, 297, 1747, ..., 3093, 717, 1554])\n", + "deriv_tensor (40, 81), indices tensor([2289, 297, 1747, ..., 3093, 717, 1554])\n", + "deriv_tensor (40, 81), indices tensor([2289, 297, 1747, ..., 3093, 717, 1554])\n", + "deriv_tensor (40, 81), indices tensor([2289, 297, 1747, ..., 3093, 717, 1554])\n", + "deriv_tensor (40, 81), indices tensor([ 872, 1531, 1553, ..., 2850, 1545, 920])\n", + "deriv_tensor (40, 81), indices tensor([ 872, 1531, 1553, ..., 2850, 1545, 920])\n", + "deriv_tensor (40, 81), indices tensor([ 872, 1531, 1553, ..., 2850, 1545, 920])\n", + "deriv_tensor (40, 81), indices tensor([ 872, 1531, 1553, ..., 2850, 1545, 920])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 2856, 1327, ..., 2336, 2550, 2510])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 2856, 1327, ..., 2336, 2550, 2510])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 2856, 1327, ..., 2336, 2550, 2510])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 2856, 1327, ..., 2336, 2550, 2510])\n", + "deriv_tensor (40, 81), indices tensor([1620, 17, 2981, ..., 2553, 653, 2043])\n", + "deriv_tensor (40, 81), indices tensor([1620, 17, 2981, ..., 2553, 653, 2043])\n", + "deriv_tensor (40, 81), indices tensor([1620, 17, 2981, ..., 2553, 653, 2043])\n", + "deriv_tensor (40, 81), indices tensor([1620, 17, 2981, ..., 2553, 653, 2043])\n", + "deriv_tensor (40, 81), indices tensor([1815, 1822, 1926, ..., 2222, 1940, 347])\n", + "deriv_tensor (40, 81), 
indices tensor([1815, 1822, 1926, ..., 2222, 1940, 347])\n", + "deriv_tensor (40, 81), indices tensor([1815, 1822, 1926, ..., 2222, 1940, 347])\n", + "deriv_tensor (40, 81), indices tensor([1815, 1822, 1926, ..., 2222, 1940, 347])\n", + "deriv_tensor (40, 81), indices tensor([ 15, 2365, 1018, ..., 911, 2529, 1355])\n", + "deriv_tensor (40, 81), indices tensor([ 15, 2365, 1018, ..., 911, 2529, 1355])\n", + "deriv_tensor (40, 81), indices tensor([ 15, 2365, 1018, ..., 911, 2529, 1355])\n", + "deriv_tensor (40, 81), indices tensor([ 15, 2365, 1018, ..., 911, 2529, 1355])\n", + "deriv_tensor (40, 81), indices tensor([ 619, 831, 531, ..., 1470, 78, 701])\n", + "deriv_tensor (40, 81), indices tensor([ 619, 831, 531, ..., 1470, 78, 701])\n", + "deriv_tensor (40, 81), indices tensor([ 619, 831, 531, ..., 1470, 78, 701])\n", + "deriv_tensor (40, 81), indices tensor([ 619, 831, 531, ..., 1470, 78, 701])\n", + "deriv_tensor (40, 81), indices tensor([1339, 2494, 2359, ..., 1105, 650, 2346])\n", + "deriv_tensor (40, 81), indices tensor([1339, 2494, 2359, ..., 1105, 650, 2346])\n", + "deriv_tensor (40, 81), indices tensor([1339, 2494, 2359, ..., 1105, 650, 2346])\n", + "deriv_tensor (40, 81), indices tensor([1339, 2494, 2359, ..., 1105, 650, 2346])\n", + "deriv_tensor (40, 81), indices tensor([3116, 797, 92, ..., 1393, 1243, 2549])\n", + "deriv_tensor (40, 81), indices tensor([3116, 797, 92, ..., 1393, 1243, 2549])\n", + "deriv_tensor (40, 81), indices tensor([3116, 797, 92, ..., 1393, 1243, 2549])\n", + "deriv_tensor (40, 81), indices tensor([3116, 797, 92, ..., 1393, 1243, 2549])\n", + "deriv_tensor (40, 81), indices tensor([ 924, 156, 2069, ..., 741, 2810, 459])\n", + "deriv_tensor (40, 81), indices tensor([ 924, 156, 2069, ..., 741, 2810, 459])\n", + "deriv_tensor (40, 81), indices tensor([ 924, 156, 2069, ..., 741, 2810, 459])\n", + "deriv_tensor (40, 81), indices tensor([ 924, 156, 2069, ..., 741, 2810, 459])\n", + "deriv_tensor (40, 81), indices tensor([1350, 3160, 1897, ..., 441, 1109, 223])\n", + "deriv_tensor (40, 81), indices tensor([1350, 3160, 1897, ..., 441, 1109, 223])\n", + "deriv_tensor (40, 81), indices tensor([1350, 3160, 1897, ..., 441, 1109, 223])\n", + "deriv_tensor (40, 81), indices tensor([1350, 3160, 1897, ..., 441, 1109, 223])\n", + "deriv_tensor (40, 81), indices tensor([3122, 3186, 2608, ..., 768, 2085, 1269])\n", + "deriv_tensor (40, 81), indices tensor([3122, 3186, 2608, ..., 768, 2085, 1269])\n", + "deriv_tensor (40, 81), indices tensor([3122, 3186, 2608, ..., 768, 2085, 1269])\n", + "deriv_tensor (40, 81), indices tensor([3122, 3186, 2608, ..., 768, 2085, 1269])\n", + "deriv_tensor (40, 81), indices tensor([1114, 196, 2437, ..., 2405, 1727, 2103])\n", + "deriv_tensor (40, 81), indices tensor([1114, 196, 2437, ..., 2405, 1727, 2103])\n", + "deriv_tensor (40, 81), indices tensor([1114, 196, 2437, ..., 2405, 1727, 2103])\n", + "deriv_tensor (40, 81), indices tensor([1114, 196, 2437, ..., 2405, 1727, 2103])\n", + "deriv_tensor (40, 81), indices tensor([2819, 2545, 640, ..., 3065, 2113, 1274])\n", + "deriv_tensor (40, 81), indices tensor([2819, 2545, 640, ..., 3065, 2113, 1274])\n", + "deriv_tensor (40, 81), indices tensor([2819, 2545, 640, ..., 3065, 2113, 1274])\n", + "deriv_tensor (40, 81), indices tensor([2819, 2545, 640, ..., 3065, 2113, 1274])\n", + "deriv_tensor (40, 81), indices tensor([2250, 1786, 1929, ..., 2410, 1573, 2190])\n", + "deriv_tensor (40, 81), indices tensor([2250, 1786, 1929, ..., 2410, 1573, 2190])\n", + "deriv_tensor (40, 81), indices tensor([2250, 
1786, 1929, ..., 2410, 1573, 2190])\n", + "... [truncated notebook output: this diagnostic line repeats for several hundred further batches; each line reports an index tensor sampled from the flattened (40, 81) derivative field, and every index tensor is printed four times in a row] ...\n", + "deriv_tensor (40, 81),
indices tensor([2777, 276, 1436, ..., 3111, 3126, 3148])\n", + "deriv_tensor (40, 81), indices tensor([1585, 1177, 891, ..., 1339, 278, 2108])\n", + "deriv_tensor (40, 81), indices tensor([1585, 1177, 891, ..., 1339, 278, 2108])\n", + "deriv_tensor (40, 81), indices tensor([1585, 1177, 891, ..., 1339, 278, 2108])\n", + "deriv_tensor (40, 81), indices tensor([1585, 1177, 891, ..., 1339, 278, 2108])\n", + "deriv_tensor (40, 81), indices tensor([1433, 2335, 2567, ..., 3225, 467, 1221])\n", + "deriv_tensor (40, 81), indices tensor([1433, 2335, 2567, ..., 3225, 467, 1221])\n", + "deriv_tensor (40, 81), indices tensor([1433, 2335, 2567, ..., 3225, 467, 1221])\n", + "deriv_tensor (40, 81), indices tensor([1433, 2335, 2567, ..., 3225, 467, 1221])\n", + "deriv_tensor (40, 81), indices tensor([1495, 483, 659, ..., 1147, 1804, 2627])\n", + "deriv_tensor (40, 81), indices tensor([1495, 483, 659, ..., 1147, 1804, 2627])\n", + "deriv_tensor (40, 81), indices tensor([1495, 483, 659, ..., 1147, 1804, 2627])\n", + "deriv_tensor (40, 81), indices tensor([1495, 483, 659, ..., 1147, 1804, 2627])\n", + "deriv_tensor (40, 81), indices tensor([ 762, 745, 2692, ..., 466, 2386, 2028])\n", + "deriv_tensor (40, 81), indices tensor([ 762, 745, 2692, ..., 466, 2386, 2028])\n", + "deriv_tensor (40, 81), indices tensor([ 762, 745, 2692, ..., 466, 2386, 2028])\n", + "deriv_tensor (40, 81), indices tensor([ 762, 745, 2692, ..., 466, 2386, 2028])\n", + "deriv_tensor (40, 81), indices tensor([1032, 1034, 450, ..., 2627, 1075, 1493])\n", + "deriv_tensor (40, 81), indices tensor([1032, 1034, 450, ..., 2627, 1075, 1493])\n", + "deriv_tensor (40, 81), indices tensor([1032, 1034, 450, ..., 2627, 1075, 1493])\n", + "deriv_tensor (40, 81), indices tensor([1032, 1034, 450, ..., 2627, 1075, 1493])\n", + "deriv_tensor (40, 81), indices tensor([2760, 1102, 430, ..., 1986, 665, 2076])\n", + "deriv_tensor (40, 81), indices tensor([2760, 1102, 430, ..., 1986, 665, 2076])\n", + "deriv_tensor (40, 81), indices tensor([2760, 1102, 430, ..., 1986, 665, 2076])\n", + "deriv_tensor (40, 81), indices tensor([2760, 1102, 430, ..., 1986, 665, 2076])\n", + "deriv_tensor (40, 81), indices tensor([1875, 73, 769, ..., 2457, 279, 1860])\n", + "deriv_tensor (40, 81), indices tensor([1875, 73, 769, ..., 2457, 279, 1860])\n", + "deriv_tensor (40, 81), indices tensor([1875, 73, 769, ..., 2457, 279, 1860])\n", + "deriv_tensor (40, 81), indices tensor([1875, 73, 769, ..., 2457, 279, 1860])\n", + "deriv_tensor (40, 81), indices tensor([1240, 2842, 1993, ..., 704, 1060, 2107])\n", + "deriv_tensor (40, 81), indices tensor([1240, 2842, 1993, ..., 704, 1060, 2107])\n", + "deriv_tensor (40, 81), indices tensor([1240, 2842, 1993, ..., 704, 1060, 2107])\n", + "deriv_tensor (40, 81), indices tensor([1240, 2842, 1993, ..., 704, 1060, 2107])\n", + "deriv_tensor (40, 81), indices tensor([2336, 753, 2885, ..., 146, 490, 2058])\n", + "deriv_tensor (40, 81), indices tensor([2336, 753, 2885, ..., 146, 490, 2058])\n", + "deriv_tensor (40, 81), indices tensor([2336, 753, 2885, ..., 146, 490, 2058])\n", + "deriv_tensor (40, 81), indices tensor([2336, 753, 2885, ..., 146, 490, 2058])\n", + "deriv_tensor (40, 81), indices tensor([2299, 1286, 3220, ..., 529, 1856, 695])\n", + "deriv_tensor (40, 81), indices tensor([2299, 1286, 3220, ..., 529, 1856, 695])\n", + "deriv_tensor (40, 81), indices tensor([2299, 1286, 3220, ..., 529, 1856, 695])\n", + "deriv_tensor (40, 81), indices tensor([2299, 1286, 3220, ..., 529, 1856, 695])\n", + "deriv_tensor (40, 81), indices tensor([2930, 1557, 
2935, ..., 2374, 1403, 537])\n", + "deriv_tensor (40, 81), indices tensor([2930, 1557, 2935, ..., 2374, 1403, 537])\n", + "deriv_tensor (40, 81), indices tensor([2930, 1557, 2935, ..., 2374, 1403, 537])\n", + "deriv_tensor (40, 81), indices tensor([2930, 1557, 2935, ..., 2374, 1403, 537])\n", + "deriv_tensor (40, 81), indices tensor([ 369, 2021, 2485, ..., 3183, 3000, 2867])\n", + "deriv_tensor (40, 81), indices tensor([ 369, 2021, 2485, ..., 3183, 3000, 2867])\n", + "deriv_tensor (40, 81), indices tensor([ 369, 2021, 2485, ..., 3183, 3000, 2867])\n", + "deriv_tensor (40, 81), indices tensor([ 369, 2021, 2485, ..., 3183, 3000, 2867])\n", + "deriv_tensor (40, 81), indices tensor([3142, 481, 1818, ..., 173, 2, 2790])\n", + "deriv_tensor (40, 81), indices tensor([3142, 481, 1818, ..., 173, 2, 2790])\n", + "deriv_tensor (40, 81), indices tensor([3142, 481, 1818, ..., 173, 2, 2790])\n", + "deriv_tensor (40, 81), indices tensor([3142, 481, 1818, ..., 173, 2, 2790])\n", + "deriv_tensor (40, 81), indices tensor([3158, 1530, 538, ..., 1174, 1456, 1676])\n", + "deriv_tensor (40, 81), indices tensor([3158, 1530, 538, ..., 1174, 1456, 1676])\n", + "deriv_tensor (40, 81), indices tensor([3158, 1530, 538, ..., 1174, 1456, 1676])\n", + "deriv_tensor (40, 81), indices tensor([3158, 1530, 538, ..., 1174, 1456, 1676])\n", + "deriv_tensor (40, 81), indices tensor([3115, 766, 2922, ..., 741, 1268, 1215])\n", + "deriv_tensor (40, 81), indices tensor([3115, 766, 2922, ..., 741, 1268, 1215])\n", + "deriv_tensor (40, 81), indices tensor([3115, 766, 2922, ..., 741, 1268, 1215])\n", + "deriv_tensor (40, 81), indices tensor([3115, 766, 2922, ..., 741, 1268, 1215])\n", + "deriv_tensor (40, 81), indices tensor([3049, 1022, 2979, ..., 1296, 2761, 2041])\n", + "deriv_tensor (40, 81), indices tensor([3049, 1022, 2979, ..., 1296, 2761, 2041])\n", + "deriv_tensor (40, 81), indices tensor([3049, 1022, 2979, ..., 1296, 2761, 2041])\n", + "deriv_tensor (40, 81), indices tensor([3049, 1022, 2979, ..., 1296, 2761, 2041])\n", + "deriv_tensor (40, 81), indices tensor([1381, 1782, 1571, ..., 2543, 617, 1639])\n", + "deriv_tensor (40, 81), indices tensor([1381, 1782, 1571, ..., 2543, 617, 1639])\n", + "deriv_tensor (40, 81), indices tensor([1381, 1782, 1571, ..., 2543, 617, 1639])\n", + "deriv_tensor (40, 81), indices tensor([1381, 1782, 1571, ..., 2543, 617, 1639])\n", + "deriv_tensor (40, 81), indices tensor([2771, 2885, 2403, ..., 1664, 2015, 811])\n", + "deriv_tensor (40, 81), indices tensor([2771, 2885, 2403, ..., 1664, 2015, 811])\n", + "deriv_tensor (40, 81), indices tensor([2771, 2885, 2403, ..., 1664, 2015, 811])\n", + "deriv_tensor (40, 81), indices tensor([2771, 2885, 2403, ..., 1664, 2015, 811])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 14, 2579, ..., 2235, 2919, 1310])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 14, 2579, ..., 2235, 2919, 1310])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 14, 2579, ..., 2235, 2919, 1310])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 14, 2579, ..., 2235, 2919, 1310])\n", + "deriv_tensor (40, 81), indices tensor([1107, 508, 1017, ..., 781, 1267, 326])\n", + "deriv_tensor (40, 81), indices tensor([1107, 508, 1017, ..., 781, 1267, 326])\n", + "deriv_tensor (40, 81), indices tensor([1107, 508, 1017, ..., 781, 1267, 326])\n", + "deriv_tensor (40, 81), indices tensor([1107, 508, 1017, ..., 781, 1267, 326])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 817, 363, ..., 2308, 1052, 2794])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 817, 363, ..., 2308, 
1052, 2794])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 817, 363, ..., 2308, 1052, 2794])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 817, 363, ..., 2308, 1052, 2794])\n", + "deriv_tensor (40, 81), indices tensor([2485, 979, 3199, ..., 3129, 1067, 1328])\n", + "deriv_tensor (40, 81), indices tensor([2485, 979, 3199, ..., 3129, 1067, 1328])\n", + "deriv_tensor (40, 81), indices tensor([2485, 979, 3199, ..., 3129, 1067, 1328])\n", + "deriv_tensor (40, 81), indices tensor([2485, 979, 3199, ..., 3129, 1067, 1328])\n", + "deriv_tensor (40, 81), indices tensor([2795, 2708, 1734, ..., 1793, 678, 2900])\n", + "deriv_tensor (40, 81), indices tensor([2795, 2708, 1734, ..., 1793, 678, 2900])\n", + "deriv_tensor (40, 81), indices tensor([2795, 2708, 1734, ..., 1793, 678, 2900])\n", + "deriv_tensor (40, 81), indices tensor([2795, 2708, 1734, ..., 1793, 678, 2900])\n", + "deriv_tensor (40, 81), indices tensor([ 915, 337, 1293, ..., 2519, 1055, 2108])\n", + "deriv_tensor (40, 81), indices tensor([ 915, 337, 1293, ..., 2519, 1055, 2108])\n", + "deriv_tensor (40, 81), indices tensor([ 915, 337, 1293, ..., 2519, 1055, 2108])\n", + "deriv_tensor (40, 81), indices tensor([ 915, 337, 1293, ..., 2519, 1055, 2108])\n", + "deriv_tensor (40, 81), indices tensor([1315, 84, 1083, ..., 2007, 1262, 2066])\n", + "deriv_tensor (40, 81), indices tensor([1315, 84, 1083, ..., 2007, 1262, 2066])\n", + "deriv_tensor (40, 81), indices tensor([1315, 84, 1083, ..., 2007, 1262, 2066])\n", + "deriv_tensor (40, 81), indices tensor([1315, 84, 1083, ..., 2007, 1262, 2066])\n", + "deriv_tensor (40, 81), indices tensor([2083, 67, 2073, ..., 2008, 604, 520])\n", + "deriv_tensor (40, 81), indices tensor([2083, 67, 2073, ..., 2008, 604, 520])\n", + "deriv_tensor (40, 81), indices tensor([2083, 67, 2073, ..., 2008, 604, 520])\n", + "deriv_tensor (40, 81), indices tensor([2083, 67, 2073, ..., 2008, 604, 520])\n", + "deriv_tensor (40, 81), indices tensor([1729, 146, 2042, ..., 2368, 372, 853])\n", + "deriv_tensor (40, 81), indices tensor([1729, 146, 2042, ..., 2368, 372, 853])\n", + "deriv_tensor (40, 81), indices tensor([1729, 146, 2042, ..., 2368, 372, 853])\n", + "deriv_tensor (40, 81), indices tensor([1729, 146, 2042, ..., 2368, 372, 853])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 2406, 1559, ..., 854, 1680, 708])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 2406, 1559, ..., 854, 1680, 708])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 2406, 1559, ..., 854, 1680, 708])\n", + "deriv_tensor (40, 81), indices tensor([ 710, 2406, 1559, ..., 854, 1680, 708])\n", + "deriv_tensor (40, 81), indices tensor([3075, 3067, 1004, ..., 301, 1635, 1725])\n", + "deriv_tensor (40, 81), indices tensor([3075, 3067, 1004, ..., 301, 1635, 1725])\n", + "deriv_tensor (40, 81), indices tensor([3075, 3067, 1004, ..., 301, 1635, 1725])\n", + "deriv_tensor (40, 81), indices tensor([3075, 3067, 1004, ..., 301, 1635, 1725])\n", + "deriv_tensor (40, 81), indices tensor([1397, 1325, 2366, ..., 635, 615, 398])\n", + "deriv_tensor (40, 81), indices tensor([1397, 1325, 2366, ..., 635, 615, 398])\n", + "deriv_tensor (40, 81), indices tensor([1397, 1325, 2366, ..., 635, 615, 398])\n", + "deriv_tensor (40, 81), indices tensor([1397, 1325, 2366, ..., 635, 615, 398])\n", + "deriv_tensor (40, 81), indices tensor([2161, 595, 43, ..., 1944, 2965, 2933])\n", + "deriv_tensor (40, 81), indices tensor([2161, 595, 43, ..., 1944, 2965, 2933])\n", + "deriv_tensor (40, 81), indices tensor([2161, 595, 43, ..., 1944, 2965, 2933])\n", + 
"deriv_tensor (40, 81), indices tensor([2161, 595, 43, ..., 1944, 2965, 2933])\n", + "deriv_tensor (40, 81), indices tensor([ 137, 753, 2545, ..., 390, 288, 462])\n", + "deriv_tensor (40, 81), indices tensor([ 137, 753, 2545, ..., 390, 288, 462])\n", + "deriv_tensor (40, 81), indices tensor([ 137, 753, 2545, ..., 390, 288, 462])\n", + "deriv_tensor (40, 81), indices tensor([ 137, 753, 2545, ..., 390, 288, 462])\n", + "deriv_tensor (40, 81), indices tensor([2992, 417, 1224, ..., 2032, 2940, 395])\n", + "deriv_tensor (40, 81), indices tensor([2992, 417, 1224, ..., 2032, 2940, 395])\n", + "deriv_tensor (40, 81), indices tensor([2992, 417, 1224, ..., 2032, 2940, 395])\n", + "deriv_tensor (40, 81), indices tensor([2992, 417, 1224, ..., 2032, 2940, 395])\n", + "deriv_tensor (40, 81), indices tensor([ 3, 241, 662, ..., 2586, 1507, 296])\n", + "deriv_tensor (40, 81), indices tensor([ 3, 241, 662, ..., 2586, 1507, 296])\n", + "deriv_tensor (40, 81), indices tensor([ 3, 241, 662, ..., 2586, 1507, 296])\n", + "deriv_tensor (40, 81), indices tensor([ 3, 241, 662, ..., 2586, 1507, 296])\n", + "deriv_tensor (40, 81), indices tensor([2245, 26, 763, ..., 2124, 1029, 2411])\n", + "deriv_tensor (40, 81), indices tensor([2245, 26, 763, ..., 2124, 1029, 2411])\n", + "deriv_tensor (40, 81), indices tensor([2245, 26, 763, ..., 2124, 1029, 2411])\n", + "deriv_tensor (40, 81), indices tensor([2245, 26, 763, ..., 2124, 1029, 2411])\n", + "deriv_tensor (40, 81), indices tensor([2399, 2647, 713, ..., 2753, 1929, 168])\n", + "deriv_tensor (40, 81), indices tensor([2399, 2647, 713, ..., 2753, 1929, 168])\n", + "deriv_tensor (40, 81), indices tensor([2399, 2647, 713, ..., 2753, 1929, 168])\n", + "deriv_tensor (40, 81), indices tensor([2399, 2647, 713, ..., 2753, 1929, 168])\n", + "deriv_tensor (40, 81), indices tensor([1704, 1563, 2659, ..., 3093, 1607, 1405])\n", + "deriv_tensor (40, 81), indices tensor([1704, 1563, 2659, ..., 3093, 1607, 1405])\n", + "deriv_tensor (40, 81), indices tensor([1704, 1563, 2659, ..., 3093, 1607, 1405])\n", + "deriv_tensor (40, 81), indices tensor([1704, 1563, 2659, ..., 3093, 1607, 1405])\n", + "deriv_tensor (40, 81), indices tensor([ 762, 1452, 2616, ..., 403, 2129, 1063])\n", + "deriv_tensor (40, 81), indices tensor([ 762, 1452, 2616, ..., 403, 2129, 1063])\n", + "deriv_tensor (40, 81), indices tensor([ 762, 1452, 2616, ..., 403, 2129, 1063])\n", + "deriv_tensor (40, 81), indices tensor([ 762, 1452, 2616, ..., 403, 2129, 1063])\n", + "deriv_tensor (40, 81), indices tensor([ 44, 247, 165, ..., 2680, 2254, 1427])\n", + "deriv_tensor (40, 81), indices tensor([ 44, 247, 165, ..., 2680, 2254, 1427])\n", + "deriv_tensor (40, 81), indices tensor([ 44, 247, 165, ..., 2680, 2254, 1427])\n", + "deriv_tensor (40, 81), indices tensor([ 44, 247, 165, ..., 2680, 2254, 1427])\n", + "deriv_tensor (40, 81), indices tensor([ 785, 99, 781, ..., 475, 1597, 2514])\n", + "deriv_tensor (40, 81), indices tensor([ 785, 99, 781, ..., 475, 1597, 2514])\n", + "deriv_tensor (40, 81), indices tensor([ 785, 99, 781, ..., 475, 1597, 2514])\n", + "deriv_tensor (40, 81), indices tensor([ 785, 99, 781, ..., 475, 1597, 2514])\n", + "deriv_tensor (40, 81), indices tensor([1877, 2068, 2171, ..., 1314, 548, 1235])\n", + "deriv_tensor (40, 81), indices tensor([1877, 2068, 2171, ..., 1314, 548, 1235])\n", + "deriv_tensor (40, 81), indices tensor([1877, 2068, 2171, ..., 1314, 548, 1235])\n", + "deriv_tensor (40, 81), indices tensor([1877, 2068, 2171, ..., 1314, 548, 1235])\n", + "deriv_tensor (40, 81), indices tensor([ 646, 
1183, 375, ..., 2737, 3058, 516])\n", + "deriv_tensor (40, 81), indices tensor([ 646, 1183, 375, ..., 2737, 3058, 516])\n", + "deriv_tensor (40, 81), indices tensor([ 646, 1183, 375, ..., 2737, 3058, 516])\n", + "deriv_tensor (40, 81), indices tensor([ 646, 1183, 375, ..., 2737, 3058, 516])\n", + "deriv_tensor (40, 81), indices tensor([ 287, 1155, 1670, ..., 538, 1312, 797])\n", + "deriv_tensor (40, 81), indices tensor([ 287, 1155, 1670, ..., 538, 1312, 797])\n", + "deriv_tensor (40, 81), indices tensor([ 287, 1155, 1670, ..., 538, 1312, 797])\n", + "deriv_tensor (40, 81), indices tensor([ 287, 1155, 1670, ..., 538, 1312, 797])\n", + "deriv_tensor (40, 81), indices tensor([2197, 1452, 1628, ..., 2556, 1791, 696])\n", + "deriv_tensor (40, 81), indices tensor([2197, 1452, 1628, ..., 2556, 1791, 696])\n", + "deriv_tensor (40, 81), indices tensor([2197, 1452, 1628, ..., 2556, 1791, 696])\n", + "deriv_tensor (40, 81), indices tensor([2197, 1452, 1628, ..., 2556, 1791, 696])\n", + "deriv_tensor (40, 81), indices tensor([3011, 411, 43, ..., 2103, 496, 2877])\n", + "deriv_tensor (40, 81), indices tensor([3011, 411, 43, ..., 2103, 496, 2877])\n", + "deriv_tensor (40, 81), indices tensor([3011, 411, 43, ..., 2103, 496, 2877])\n", + "deriv_tensor (40, 81), indices tensor([3011, 411, 43, ..., 2103, 496, 2877])\n", + "deriv_tensor (40, 81), indices tensor([2505, 2167, 2799, ..., 603, 2311, 2820])\n", + "deriv_tensor (40, 81), indices tensor([2505, 2167, 2799, ..., 603, 2311, 2820])\n", + "deriv_tensor (40, 81), indices tensor([2505, 2167, 2799, ..., 603, 2311, 2820])\n", + "deriv_tensor (40, 81), indices tensor([2505, 2167, 2799, ..., 603, 2311, 2820])\n", + "deriv_tensor (40, 81), indices tensor([2166, 2527, 212, ..., 218, 93, 2238])\n", + "deriv_tensor (40, 81), indices tensor([2166, 2527, 212, ..., 218, 93, 2238])\n", + "deriv_tensor (40, 81), indices tensor([2166, 2527, 212, ..., 218, 93, 2238])\n", + "deriv_tensor (40, 81), indices tensor([2166, 2527, 212, ..., 218, 93, 2238])\n", + "deriv_tensor (40, 81), indices tensor([3225, 2890, 962, ..., 51, 1074, 500])\n", + "deriv_tensor (40, 81), indices tensor([3225, 2890, 962, ..., 51, 1074, 500])\n", + "deriv_tensor (40, 81), indices tensor([3225, 2890, 962, ..., 51, 1074, 500])\n", + "deriv_tensor (40, 81), indices tensor([3225, 2890, 962, ..., 51, 1074, 500])\n", + "deriv_tensor (40, 81), indices tensor([2444, 1382, 1812, ..., 1548, 419, 2645])\n", + "deriv_tensor (40, 81), indices tensor([2444, 1382, 1812, ..., 1548, 419, 2645])\n", + "deriv_tensor (40, 81), indices tensor([2444, 1382, 1812, ..., 1548, 419, 2645])\n", + "deriv_tensor (40, 81), indices tensor([2444, 1382, 1812, ..., 1548, 419, 2645])\n", + "deriv_tensor (40, 81), indices tensor([3132, 334, 942, ..., 1401, 702, 2057])\n", + "deriv_tensor (40, 81), indices tensor([3132, 334, 942, ..., 1401, 702, 2057])\n", + "deriv_tensor (40, 81), indices tensor([3132, 334, 942, ..., 1401, 702, 2057])\n", + "deriv_tensor (40, 81), indices tensor([3132, 334, 942, ..., 1401, 702, 2057])\n", + "deriv_tensor (40, 81), indices tensor([2258, 1287, 108, ..., 786, 1403, 1592])\n", + "deriv_tensor (40, 81), indices tensor([2258, 1287, 108, ..., 786, 1403, 1592])\n", + "deriv_tensor (40, 81), indices tensor([2258, 1287, 108, ..., 786, 1403, 1592])\n", + "deriv_tensor (40, 81), indices tensor([2258, 1287, 108, ..., 786, 1403, 1592])\n", + "deriv_tensor (40, 81), indices tensor([3239, 3126, 2106, ..., 630, 1476, 2006])\n", + "deriv_tensor (40, 81), indices tensor([3239, 3126, 2106, ..., 630, 1476, 2006])\n", + 
"deriv_tensor (40, 81), indices tensor([3239, 3126, 2106, ..., 630, 1476, 2006])\n", + "deriv_tensor (40, 81), indices tensor([3239, 3126, 2106, ..., 630, 1476, 2006])\n", + "deriv_tensor (40, 81), indices tensor([2430, 477, 1573, ..., 2265, 565, 1297])\n", + "deriv_tensor (40, 81), indices tensor([2430, 477, 1573, ..., 2265, 565, 1297])\n", + "deriv_tensor (40, 81), indices tensor([2430, 477, 1573, ..., 2265, 565, 1297])\n", + "deriv_tensor (40, 81), indices tensor([2430, 477, 1573, ..., 2265, 565, 1297])\n", + "deriv_tensor (40, 81), indices tensor([1509, 2682, 2244, ..., 2668, 1783, 1949])\n", + "deriv_tensor (40, 81), indices tensor([1509, 2682, 2244, ..., 2668, 1783, 1949])\n", + "deriv_tensor (40, 81), indices tensor([1509, 2682, 2244, ..., 2668, 1783, 1949])\n", + "deriv_tensor (40, 81), indices tensor([1509, 2682, 2244, ..., 2668, 1783, 1949])\n", + "deriv_tensor (40, 81), indices tensor([1264, 68, 2562, ..., 1676, 3142, 1575])\n", + "deriv_tensor (40, 81), indices tensor([1264, 68, 2562, ..., 1676, 3142, 1575])\n", + "deriv_tensor (40, 81), indices tensor([1264, 68, 2562, ..., 1676, 3142, 1575])\n", + "deriv_tensor (40, 81), indices tensor([1264, 68, 2562, ..., 1676, 3142, 1575])\n", + "deriv_tensor (40, 81), indices tensor([ 712, 1386, 2051, ..., 1831, 1562, 1255])\n", + "deriv_tensor (40, 81), indices tensor([ 712, 1386, 2051, ..., 1831, 1562, 1255])\n", + "deriv_tensor (40, 81), indices tensor([ 712, 1386, 2051, ..., 1831, 1562, 1255])\n", + "deriv_tensor (40, 81), indices tensor([ 712, 1386, 2051, ..., 1831, 1562, 1255])\n", + "deriv_tensor (40, 81), indices tensor([2459, 897, 420, ..., 3182, 964, 2595])\n", + "deriv_tensor (40, 81), indices tensor([2459, 897, 420, ..., 3182, 964, 2595])\n", + "deriv_tensor (40, 81), indices tensor([2459, 897, 420, ..., 3182, 964, 2595])\n", + "deriv_tensor (40, 81), indices tensor([2459, 897, 420, ..., 3182, 964, 2595])\n", + "deriv_tensor (40, 81), indices tensor([2545, 2539, 1463, ..., 1860, 169, 2605])\n", + "deriv_tensor (40, 81), indices tensor([2545, 2539, 1463, ..., 1860, 169, 2605])\n", + "deriv_tensor (40, 81), indices tensor([2545, 2539, 1463, ..., 1860, 169, 2605])\n", + "deriv_tensor (40, 81), indices tensor([2545, 2539, 1463, ..., 1860, 169, 2605])\n", + "deriv_tensor (40, 81), indices tensor([2794, 582, 1178, ..., 1564, 1447, 997])\n", + "deriv_tensor (40, 81), indices tensor([2794, 582, 1178, ..., 1564, 1447, 997])\n", + "deriv_tensor (40, 81), indices tensor([2794, 582, 1178, ..., 1564, 1447, 997])\n", + "deriv_tensor (40, 81), indices tensor([2794, 582, 1178, ..., 1564, 1447, 997])\n", + "deriv_tensor (40, 81), indices tensor([1786, 1432, 2793, ..., 2216, 715, 2012])\n", + "deriv_tensor (40, 81), indices tensor([1786, 1432, 2793, ..., 2216, 715, 2012])\n", + "deriv_tensor (40, 81), indices tensor([1786, 1432, 2793, ..., 2216, 715, 2012])\n", + "deriv_tensor (40, 81), indices tensor([1786, 1432, 2793, ..., 2216, 715, 2012])\n", + "deriv_tensor (40, 81), indices tensor([3203, 3236, 2394, ..., 2800, 3228, 1895])\n", + "deriv_tensor (40, 81), indices tensor([3203, 3236, 2394, ..., 2800, 3228, 1895])\n", + "deriv_tensor (40, 81), indices tensor([3203, 3236, 2394, ..., 2800, 3228, 1895])\n", + "deriv_tensor (40, 81), indices tensor([3203, 3236, 2394, ..., 2800, 3228, 1895])\n", + "deriv_tensor (40, 81), indices tensor([1303, 1193, 2367, ..., 2760, 1437, 435])\n", + "deriv_tensor (40, 81), indices tensor([1303, 1193, 2367, ..., 2760, 1437, 435])\n", + "deriv_tensor (40, 81), indices tensor([1303, 1193, 2367, ..., 2760, 1437, 
435])\n", + "deriv_tensor (40, 81), indices tensor([1303, 1193, 2367, ..., 2760, 1437, 435])\n", + "deriv_tensor (40, 81), indices tensor([3192, 473, 2286, ..., 339, 1650, 1976])\n", + "deriv_tensor (40, 81), indices tensor([3192, 473, 2286, ..., 339, 1650, 1976])\n", + "deriv_tensor (40, 81), indices tensor([3192, 473, 2286, ..., 339, 1650, 1976])\n", + "deriv_tensor (40, 81), indices tensor([3192, 473, 2286, ..., 339, 1650, 1976])\n", + "deriv_tensor (40, 81), indices tensor([2169, 2070, 471, ..., 545, 1639, 221])\n", + "deriv_tensor (40, 81), indices tensor([2169, 2070, 471, ..., 545, 1639, 221])\n", + "deriv_tensor (40, 81), indices tensor([2169, 2070, 471, ..., 545, 1639, 221])\n", + "deriv_tensor (40, 81), indices tensor([2169, 2070, 471, ..., 545, 1639, 221])\n", + "deriv_tensor (40, 81), indices tensor([2458, 1236, 1045, ..., 1992, 2710, 1376])\n", + "deriv_tensor (40, 81), indices tensor([2458, 1236, 1045, ..., 1992, 2710, 1376])\n", + "deriv_tensor (40, 81), indices tensor([2458, 1236, 1045, ..., 1992, 2710, 1376])\n", + "deriv_tensor (40, 81), indices tensor([2458, 1236, 1045, ..., 1992, 2710, 1376])\n", + "deriv_tensor (40, 81), indices tensor([1469, 1350, 2964, ..., 2036, 3038, 1332])\n", + "deriv_tensor (40, 81), indices tensor([1469, 1350, 2964, ..., 2036, 3038, 1332])\n", + "deriv_tensor (40, 81), indices tensor([1469, 1350, 2964, ..., 2036, 3038, 1332])\n", + "deriv_tensor (40, 81), indices tensor([1469, 1350, 2964, ..., 2036, 3038, 1332])\n", + "deriv_tensor (40, 81), indices tensor([ 919, 2845, 1291, ..., 2121, 532, 991])\n", + "deriv_tensor (40, 81), indices tensor([ 919, 2845, 1291, ..., 2121, 532, 991])\n", + "deriv_tensor (40, 81), indices tensor([ 919, 2845, 1291, ..., 2121, 532, 991])\n", + "deriv_tensor (40, 81), indices tensor([ 919, 2845, 1291, ..., 2121, 532, 991])\n", + "deriv_tensor (40, 81), indices tensor([1386, 7, 2687, ..., 2765, 136, 3200])\n", + "deriv_tensor (40, 81), indices tensor([1386, 7, 2687, ..., 2765, 136, 3200])\n", + "deriv_tensor (40, 81), indices tensor([1386, 7, 2687, ..., 2765, 136, 3200])\n", + "deriv_tensor (40, 81), indices tensor([1386, 7, 2687, ..., 2765, 136, 3200])\n", + "deriv_tensor (40, 81), indices tensor([ 459, 2548, 420, ..., 907, 2800, 299])\n", + "deriv_tensor (40, 81), indices tensor([ 459, 2548, 420, ..., 907, 2800, 299])\n", + "deriv_tensor (40, 81), indices tensor([ 459, 2548, 420, ..., 907, 2800, 299])\n", + "deriv_tensor (40, 81), indices tensor([ 459, 2548, 420, ..., 907, 2800, 299])\n", + "deriv_tensor (40, 81), indices tensor([2795, 1216, 2320, ..., 326, 2349, 1247])\n", + "deriv_tensor (40, 81), indices tensor([2795, 1216, 2320, ..., 326, 2349, 1247])\n", + "deriv_tensor (40, 81), indices tensor([2795, 1216, 2320, ..., 326, 2349, 1247])\n", + "deriv_tensor (40, 81), indices tensor([2795, 1216, 2320, ..., 326, 2349, 1247])\n", + "deriv_tensor (40, 81), indices tensor([3198, 3234, 1315, ..., 1119, 1669, 439])\n", + "deriv_tensor (40, 81), indices tensor([3198, 3234, 1315, ..., 1119, 1669, 439])\n", + "deriv_tensor (40, 81), indices tensor([3198, 3234, 1315, ..., 1119, 1669, 439])\n", + "deriv_tensor (40, 81), indices tensor([3198, 3234, 1315, ..., 1119, 1669, 439])\n", + "deriv_tensor (40, 81), indices tensor([2061, 1520, 1747, ..., 3100, 1017, 535])\n", + "deriv_tensor (40, 81), indices tensor([2061, 1520, 1747, ..., 3100, 1017, 535])\n", + "deriv_tensor (40, 81), indices tensor([2061, 1520, 1747, ..., 3100, 1017, 535])\n", + "deriv_tensor (40, 81), indices tensor([2061, 1520, 1747, ..., 3100, 1017, 535])\n", + 
"deriv_tensor (40, 81), indices tensor([2287, 1956, 1144, ..., 2988, 2783, 45])\n", + "deriv_tensor (40, 81), indices tensor([2287, 1956, 1144, ..., 2988, 2783, 45])\n", + "deriv_tensor (40, 81), indices tensor([2287, 1956, 1144, ..., 2988, 2783, 45])\n", + "deriv_tensor (40, 81), indices tensor([2287, 1956, 1144, ..., 2988, 2783, 45])\n", + "deriv_tensor (40, 81), indices tensor([ 106, 1159, 2924, ..., 834, 85, 2265])\n", + "deriv_tensor (40, 81), indices tensor([ 106, 1159, 2924, ..., 834, 85, 2265])\n", + "deriv_tensor (40, 81), indices tensor([ 106, 1159, 2924, ..., 834, 85, 2265])\n", + "deriv_tensor (40, 81), indices tensor([ 106, 1159, 2924, ..., 834, 85, 2265])\n", + "deriv_tensor (40, 81), indices tensor([2416, 1366, 261, ..., 1799, 716, 1196])\n", + "deriv_tensor (40, 81), indices tensor([2416, 1366, 261, ..., 1799, 716, 1196])\n", + "deriv_tensor (40, 81), indices tensor([2416, 1366, 261, ..., 1799, 716, 1196])\n", + "deriv_tensor (40, 81), indices tensor([2416, 1366, 261, ..., 1799, 716, 1196])\n", + "deriv_tensor (40, 81), indices tensor([2960, 2044, 462, ..., 483, 1254, 318])\n", + "deriv_tensor (40, 81), indices tensor([2960, 2044, 462, ..., 483, 1254, 318])\n", + "deriv_tensor (40, 81), indices tensor([2960, 2044, 462, ..., 483, 1254, 318])\n", + "deriv_tensor (40, 81), indices tensor([2960, 2044, 462, ..., 483, 1254, 318])\n", + "deriv_tensor (40, 81), indices tensor([2299, 420, 80, ..., 50, 1315, 2922])\n", + "deriv_tensor (40, 81), indices tensor([2299, 420, 80, ..., 50, 1315, 2922])\n", + "deriv_tensor (40, 81), indices tensor([2299, 420, 80, ..., 50, 1315, 2922])\n", + "deriv_tensor (40, 81), indices tensor([2299, 420, 80, ..., 50, 1315, 2922])\n", + "deriv_tensor (40, 81), indices tensor([ 768, 2102, 3052, ..., 2483, 2808, 765])\n", + "deriv_tensor (40, 81), indices tensor([ 768, 2102, 3052, ..., 2483, 2808, 765])\n", + "deriv_tensor (40, 81), indices tensor([ 768, 2102, 3052, ..., 2483, 2808, 765])\n", + "deriv_tensor (40, 81), indices tensor([ 768, 2102, 3052, ..., 2483, 2808, 765])\n", + "deriv_tensor (40, 81), indices tensor([2570, 761, 854, ..., 604, 1049, 3044])\n", + "deriv_tensor (40, 81), indices tensor([2570, 761, 854, ..., 604, 1049, 3044])\n", + "deriv_tensor (40, 81), indices tensor([2570, 761, 854, ..., 604, 1049, 3044])\n", + "deriv_tensor (40, 81), indices tensor([2570, 761, 854, ..., 604, 1049, 3044])\n", + "deriv_tensor (40, 81), indices tensor([ 151, 629, 2793, ..., 3104, 3234, 1257])\n", + "deriv_tensor (40, 81), indices tensor([ 151, 629, 2793, ..., 3104, 3234, 1257])\n", + "deriv_tensor (40, 81), indices tensor([ 151, 629, 2793, ..., 3104, 3234, 1257])\n", + "deriv_tensor (40, 81), indices tensor([ 151, 629, 2793, ..., 3104, 3234, 1257])\n", + "deriv_tensor (40, 81), indices tensor([ 522, 1557, 510, ..., 663, 1243, 1648])\n", + "deriv_tensor (40, 81), indices tensor([ 522, 1557, 510, ..., 663, 1243, 1648])\n", + "deriv_tensor (40, 81), indices tensor([ 522, 1557, 510, ..., 663, 1243, 1648])\n", + "deriv_tensor (40, 81), indices tensor([ 522, 1557, 510, ..., 663, 1243, 1648])\n", + "deriv_tensor (40, 81), indices tensor([1281, 921, 2016, ..., 472, 1710, 3001])\n", + "deriv_tensor (40, 81), indices tensor([1281, 921, 2016, ..., 472, 1710, 3001])\n", + "deriv_tensor (40, 81), indices tensor([1281, 921, 2016, ..., 472, 1710, 3001])\n", + "deriv_tensor (40, 81), indices tensor([1281, 921, 2016, ..., 472, 1710, 3001])\n", + "deriv_tensor (40, 81), indices tensor([ 585, 935, 3130, ..., 1248, 1764, 223])\n", + "deriv_tensor (40, 81), indices tensor([ 
585, 935, 3130, ..., 1248, 1764, 223])\n", + "deriv_tensor (40, 81), indices tensor([ 585, 935, 3130, ..., 1248, 1764, 223])\n", + "deriv_tensor (40, 81), indices tensor([ 585, 935, 3130, ..., 1248, 1764, 223])\n", + "deriv_tensor (40, 81), indices tensor([1563, 3025, 3070, ..., 2408, 1226, 3201])\n", + "deriv_tensor (40, 81), indices tensor([1563, 3025, 3070, ..., 2408, 1226, 3201])\n", + "deriv_tensor (40, 81), indices tensor([1563, 3025, 3070, ..., 2408, 1226, 3201])\n", + "deriv_tensor (40, 81), indices tensor([1563, 3025, 3070, ..., 2408, 1226, 3201])\n", + "deriv_tensor (40, 81), indices tensor([2192, 669, 689, ..., 2692, 408, 3148])\n", + "deriv_tensor (40, 81), indices tensor([2192, 669, 689, ..., 2692, 408, 3148])\n", + "deriv_tensor (40, 81), indices tensor([2192, 669, 689, ..., 2692, 408, 3148])\n", + "deriv_tensor (40, 81), indices tensor([2192, 669, 689, ..., 2692, 408, 3148])\n", + "deriv_tensor (40, 81), indices tensor([1054, 2278, 3067, ..., 1170, 1712, 2674])\n", + "deriv_tensor (40, 81), indices tensor([1054, 2278, 3067, ..., 1170, 1712, 2674])\n", + "deriv_tensor (40, 81), indices tensor([1054, 2278, 3067, ..., 1170, 1712, 2674])\n", + "deriv_tensor (40, 81), indices tensor([1054, 2278, 3067, ..., 1170, 1712, 2674])\n", + "deriv_tensor (40, 81), indices tensor([1946, 799, 1407, ..., 1458, 1446, 3227])\n", + "deriv_tensor (40, 81), indices tensor([1946, 799, 1407, ..., 1458, 1446, 3227])\n", + "deriv_tensor (40, 81), indices tensor([1946, 799, 1407, ..., 1458, 1446, 3227])\n", + "deriv_tensor (40, 81), indices tensor([1946, 799, 1407, ..., 1458, 1446, 3227])\n", + "deriv_tensor (40, 81), indices tensor([2581, 3099, 1192, ..., 846, 1048, 3015])\n", + "deriv_tensor (40, 81), indices tensor([2581, 3099, 1192, ..., 846, 1048, 3015])\n", + "deriv_tensor (40, 81), indices tensor([2581, 3099, 1192, ..., 846, 1048, 3015])\n", + "deriv_tensor (40, 81), indices tensor([2581, 3099, 1192, ..., 846, 1048, 3015])\n", + "deriv_tensor (40, 81), indices tensor([2091, 2621, 117, ..., 1479, 2473, 161])\n", + "deriv_tensor (40, 81), indices tensor([2091, 2621, 117, ..., 1479, 2473, 161])\n", + "deriv_tensor (40, 81), indices tensor([2091, 2621, 117, ..., 1479, 2473, 161])\n", + "deriv_tensor (40, 81), indices tensor([2091, 2621, 117, ..., 1479, 2473, 161])\n", + "deriv_tensor (40, 81), indices tensor([2919, 278, 426, ..., 1272, 165, 968])\n", + "deriv_tensor (40, 81), indices tensor([2919, 278, 426, ..., 1272, 165, 968])\n", + "deriv_tensor (40, 81), indices tensor([2919, 278, 426, ..., 1272, 165, 968])\n", + "deriv_tensor (40, 81), indices tensor([2919, 278, 426, ..., 1272, 165, 968])\n", + "deriv_tensor (40, 81), indices tensor([ 13, 1993, 226, ..., 238, 886, 3016])\n", + "deriv_tensor (40, 81), indices tensor([ 13, 1993, 226, ..., 238, 886, 3016])\n", + "deriv_tensor (40, 81), indices tensor([ 13, 1993, 226, ..., 238, 886, 3016])\n", + "deriv_tensor (40, 81), indices tensor([ 13, 1993, 226, ..., 238, 886, 3016])\n", + "deriv_tensor (40, 81), indices tensor([2973, 3133, 1481, ..., 10, 3028, 439])\n", + "deriv_tensor (40, 81), indices tensor([2973, 3133, 1481, ..., 10, 3028, 439])\n", + "deriv_tensor (40, 81), indices tensor([2973, 3133, 1481, ..., 10, 3028, 439])\n", + "deriv_tensor (40, 81), indices tensor([2973, 3133, 1481, ..., 10, 3028, 439])\n", + "deriv_tensor (40, 81), indices tensor([3060, 252, 2568, ..., 2255, 3233, 2604])\n", + "deriv_tensor (40, 81), indices tensor([3060, 252, 2568, ..., 2255, 3233, 2604])\n", + "deriv_tensor (40, 81), indices tensor([3060, 252, 2568, ..., 2255, 
3233, 2604])\n", + "deriv_tensor (40, 81), indices tensor([3060, 252, 2568, ..., 2255, 3233, 2604])\n", + "deriv_tensor (40, 81), indices tensor([2608, 1560, 2222, ..., 804, 2965, 219])\n", + "deriv_tensor (40, 81), indices tensor([2608, 1560, 2222, ..., 804, 2965, 219])\n", + "deriv_tensor (40, 81), indices tensor([2608, 1560, 2222, ..., 804, 2965, 219])\n", + "deriv_tensor (40, 81), indices tensor([2608, 1560, 2222, ..., 804, 2965, 219])\n", + "deriv_tensor (40, 81), indices tensor([2467, 1343, 2553, ..., 2870, 516, 3083])\n", + "deriv_tensor (40, 81), indices tensor([2467, 1343, 2553, ..., 2870, 516, 3083])\n", + "deriv_tensor (40, 81), indices tensor([2467, 1343, 2553, ..., 2870, 516, 3083])\n", + "deriv_tensor (40, 81), indices tensor([2467, 1343, 2553, ..., 2870, 516, 3083])\n", + "deriv_tensor (40, 81), indices tensor([ 988, 3053, 1313, ..., 1662, 410, 1249])\n", + "deriv_tensor (40, 81), indices tensor([ 988, 3053, 1313, ..., 1662, 410, 1249])\n", + "deriv_tensor (40, 81), indices tensor([ 988, 3053, 1313, ..., 1662, 410, 1249])\n", + "deriv_tensor (40, 81), indices tensor([ 988, 3053, 1313, ..., 1662, 410, 1249])\n", + "deriv_tensor (40, 81), indices tensor([ 823, 2308, 1142, ..., 2676, 1021, 305])\n", + "deriv_tensor (40, 81), indices tensor([ 823, 2308, 1142, ..., 2676, 1021, 305])\n", + "deriv_tensor (40, 81), indices tensor([ 823, 2308, 1142, ..., 2676, 1021, 305])\n", + "deriv_tensor (40, 81), indices tensor([ 823, 2308, 1142, ..., 2676, 1021, 305])\n", + "deriv_tensor (40, 81), indices tensor([1648, 1580, 818, ..., 763, 828, 2438])\n", + "deriv_tensor (40, 81), indices tensor([1648, 1580, 818, ..., 763, 828, 2438])\n", + "deriv_tensor (40, 81), indices tensor([1648, 1580, 818, ..., 763, 828, 2438])\n", + "deriv_tensor (40, 81), indices tensor([1648, 1580, 818, ..., 763, 828, 2438])\n", + "deriv_tensor (40, 81), indices tensor([2249, 3100, 1489, ..., 2561, 197, 436])\n", + "deriv_tensor (40, 81), indices tensor([2249, 3100, 1489, ..., 2561, 197, 436])\n", + "deriv_tensor (40, 81), indices tensor([2249, 3100, 1489, ..., 2561, 197, 436])\n", + "deriv_tensor (40, 81), indices tensor([2249, 3100, 1489, ..., 2561, 197, 436])\n", + "deriv_tensor (40, 81), indices tensor([ 732, 3034, 1066, ..., 2419, 2282, 2632])\n", + "deriv_tensor (40, 81), indices tensor([ 732, 3034, 1066, ..., 2419, 2282, 2632])\n", + "deriv_tensor (40, 81), indices tensor([ 732, 3034, 1066, ..., 2419, 2282, 2632])\n", + "deriv_tensor (40, 81), indices tensor([ 732, 3034, 1066, ..., 2419, 2282, 2632])\n", + "deriv_tensor (40, 81), indices tensor([1928, 935, 968, ..., 755, 292, 1945])\n", + "deriv_tensor (40, 81), indices tensor([1928, 935, 968, ..., 755, 292, 1945])\n", + "deriv_tensor (40, 81), indices tensor([1928, 935, 968, ..., 755, 292, 1945])\n", + "deriv_tensor (40, 81), indices tensor([1928, 935, 968, ..., 755, 292, 1945])\n", + "deriv_tensor (40, 81), indices tensor([ 10, 667, 609, ..., 1069, 910, 2035])\n", + "deriv_tensor (40, 81), indices tensor([ 10, 667, 609, ..., 1069, 910, 2035])\n", + "deriv_tensor (40, 81), indices tensor([ 10, 667, 609, ..., 1069, 910, 2035])\n", + "deriv_tensor (40, 81), indices tensor([ 10, 667, 609, ..., 1069, 910, 2035])\n", + "deriv_tensor (40, 81), indices tensor([1441, 3169, 229, ..., 2639, 1331, 3043])\n", + "deriv_tensor (40, 81), indices tensor([1441, 3169, 229, ..., 2639, 1331, 3043])\n", + "deriv_tensor (40, 81), indices tensor([1441, 3169, 229, ..., 2639, 1331, 3043])\n", + "deriv_tensor (40, 81), indices tensor([1441, 3169, 229, ..., 2639, 1331, 3043])\n", + 
"deriv_tensor (40, 81), indices tensor([ 631, 2687, 1481, ..., 165, 1516, 489])\n", + "deriv_tensor (40, 81), indices tensor([ 631, 2687, 1481, ..., 165, 1516, 489])\n", + "deriv_tensor (40, 81), indices tensor([ 631, 2687, 1481, ..., 165, 1516, 489])\n", + "deriv_tensor (40, 81), indices tensor([ 631, 2687, 1481, ..., 165, 1516, 489])\n", + "deriv_tensor (40, 81), indices tensor([1210, 808, 2043, ..., 2458, 2563, 1228])\n", + "deriv_tensor (40, 81), indices tensor([1210, 808, 2043, ..., 2458, 2563, 1228])\n", + "deriv_tensor (40, 81), indices tensor([1210, 808, 2043, ..., 2458, 2563, 1228])\n", + "deriv_tensor (40, 81), indices tensor([1210, 808, 2043, ..., 2458, 2563, 1228])\n", + "deriv_tensor (40, 81), indices tensor([ 113, 312, 1467, ..., 2814, 781, 2968])\n", + "deriv_tensor (40, 81), indices tensor([ 113, 312, 1467, ..., 2814, 781, 2968])\n", + "deriv_tensor (40, 81), indices tensor([ 113, 312, 1467, ..., 2814, 781, 2968])\n", + "deriv_tensor (40, 81), indices tensor([ 113, 312, 1467, ..., 2814, 781, 2968])\n", + "deriv_tensor (40, 81), indices tensor([1997, 916, 1278, ..., 2524, 1424, 1131])\n", + "deriv_tensor (40, 81), indices tensor([1997, 916, 1278, ..., 2524, 1424, 1131])\n", + "deriv_tensor (40, 81), indices tensor([1997, 916, 1278, ..., 2524, 1424, 1131])\n", + "deriv_tensor (40, 81), indices tensor([1997, 916, 1278, ..., 2524, 1424, 1131])\n", + "deriv_tensor (40, 81), indices tensor([2094, 257, 2637, ..., 12, 2265, 2823])\n", + "deriv_tensor (40, 81), indices tensor([2094, 257, 2637, ..., 12, 2265, 2823])\n", + "deriv_tensor (40, 81), indices tensor([2094, 257, 2637, ..., 12, 2265, 2823])\n", + "deriv_tensor (40, 81), indices tensor([2094, 257, 2637, ..., 12, 2265, 2823])\n", + "deriv_tensor (40, 81), indices tensor([1596, 1644, 2950, ..., 3188, 659, 1189])\n", + "deriv_tensor (40, 81), indices tensor([1596, 1644, 2950, ..., 3188, 659, 1189])\n", + "deriv_tensor (40, 81), indices tensor([1596, 1644, 2950, ..., 3188, 659, 1189])\n", + "deriv_tensor (40, 81), indices tensor([1596, 1644, 2950, ..., 3188, 659, 1189])\n", + "deriv_tensor (40, 81), indices tensor([ 597, 1105, 422, ..., 594, 1682, 286])\n", + "deriv_tensor (40, 81), indices tensor([ 597, 1105, 422, ..., 594, 1682, 286])\n", + "deriv_tensor (40, 81), indices tensor([ 597, 1105, 422, ..., 594, 1682, 286])\n", + "deriv_tensor (40, 81), indices tensor([ 597, 1105, 422, ..., 594, 1682, 286])\n", + "deriv_tensor (40, 81), indices tensor([2843, 1087, 2771, ..., 1325, 238, 769])\n", + "deriv_tensor (40, 81), indices tensor([2843, 1087, 2771, ..., 1325, 238, 769])\n", + "deriv_tensor (40, 81), indices tensor([2843, 1087, 2771, ..., 1325, 238, 769])\n", + "deriv_tensor (40, 81), indices tensor([2843, 1087, 2771, ..., 1325, 238, 769])\n", + "deriv_tensor (40, 81), indices tensor([1949, 1661, 1270, ..., 2042, 1654, 325])\n", + "deriv_tensor (40, 81), indices tensor([1949, 1661, 1270, ..., 2042, 1654, 325])\n", + "deriv_tensor (40, 81), indices tensor([1949, 1661, 1270, ..., 2042, 1654, 325])\n", + "deriv_tensor (40, 81), indices tensor([1949, 1661, 1270, ..., 2042, 1654, 325])\n", + "deriv_tensor (40, 81), indices tensor([2483, 1258, 2093, ..., 1568, 1474, 713])\n", + "deriv_tensor (40, 81), indices tensor([2483, 1258, 2093, ..., 1568, 1474, 713])\n", + "deriv_tensor (40, 81), indices tensor([2483, 1258, 2093, ..., 1568, 1474, 713])\n", + "deriv_tensor (40, 81), indices tensor([2483, 1258, 2093, ..., 1568, 1474, 713])\n", + "deriv_tensor (40, 81), indices tensor([1391, 1879, 3001, ..., 1612, 416, 38])\n", + "deriv_tensor 
(40, 81), indices tensor([1391, 1879, 3001, ..., 1612, 416, 38])\n", + "deriv_tensor (40, 81), indices tensor([1391, 1879, 3001, ..., 1612, 416, 38])\n", + "deriv_tensor (40, 81), indices tensor([1391, 1879, 3001, ..., 1612, 416, 38])\n", + "deriv_tensor (40, 81), indices tensor([2026, 578, 26, ..., 2290, 1625, 2962])\n", + "deriv_tensor (40, 81), indices tensor([2026, 578, 26, ..., 2290, 1625, 2962])\n", + "deriv_tensor (40, 81), indices tensor([2026, 578, 26, ..., 2290, 1625, 2962])\n", + "deriv_tensor (40, 81), indices tensor([2026, 578, 26, ..., 2290, 1625, 2962])\n", + "deriv_tensor (40, 81), indices tensor([3045, 1985, 1444, ..., 2782, 33, 523])\n", + "deriv_tensor (40, 81), indices tensor([3045, 1985, 1444, ..., 2782, 33, 523])\n", + "deriv_tensor (40, 81), indices tensor([3045, 1985, 1444, ..., 2782, 33, 523])\n", + "deriv_tensor (40, 81), indices tensor([3045, 1985, 1444, ..., 2782, 33, 523])\n", + "deriv_tensor (40, 81), indices tensor([3170, 1474, 213, ..., 48, 1201, 1097])\n", + "deriv_tensor (40, 81), indices tensor([3170, 1474, 213, ..., 48, 1201, 1097])\n", + "deriv_tensor (40, 81), indices tensor([3170, 1474, 213, ..., 48, 1201, 1097])\n", + "deriv_tensor (40, 81), indices tensor([3170, 1474, 213, ..., 48, 1201, 1097])\n", + "deriv_tensor (40, 81), indices tensor([1868, 1681, 3177, ..., 2434, 1439, 2510])\n", + "deriv_tensor (40, 81), indices tensor([1868, 1681, 3177, ..., 2434, 1439, 2510])\n", + "deriv_tensor (40, 81), indices tensor([1868, 1681, 3177, ..., 2434, 1439, 2510])\n", + "deriv_tensor (40, 81), indices tensor([1868, 1681, 3177, ..., 2434, 1439, 2510])\n", + "deriv_tensor (40, 81), indices tensor([2653, 2367, 350, ..., 1787, 206, 1741])\n", + "deriv_tensor (40, 81), indices tensor([2653, 2367, 350, ..., 1787, 206, 1741])\n", + "deriv_tensor (40, 81), indices tensor([2653, 2367, 350, ..., 1787, 206, 1741])\n", + "deriv_tensor (40, 81), indices tensor([2653, 2367, 350, ..., 1787, 206, 1741])\n", + "deriv_tensor (40, 81), indices tensor([1999, 1236, 309, ..., 2078, 1618, 1474])\n", + "deriv_tensor (40, 81), indices tensor([1999, 1236, 309, ..., 2078, 1618, 1474])\n", + "deriv_tensor (40, 81), indices tensor([1999, 1236, 309, ..., 2078, 1618, 1474])\n", + "deriv_tensor (40, 81), indices tensor([1999, 1236, 309, ..., 2078, 1618, 1474])\n", + "deriv_tensor (40, 81), indices tensor([ 667, 1665, 1371, ..., 760, 718, 941])\n", + "deriv_tensor (40, 81), indices tensor([ 667, 1665, 1371, ..., 760, 718, 941])\n", + "deriv_tensor (40, 81), indices tensor([ 667, 1665, 1371, ..., 760, 718, 941])\n", + "deriv_tensor (40, 81), indices tensor([ 667, 1665, 1371, ..., 760, 718, 941])\n", + "deriv_tensor (40, 81), indices tensor([2545, 524, 2319, ..., 2280, 398, 2622])\n", + "deriv_tensor (40, 81), indices tensor([2545, 524, 2319, ..., 2280, 398, 2622])\n", + "deriv_tensor (40, 81), indices tensor([2545, 524, 2319, ..., 2280, 398, 2622])\n", + "deriv_tensor (40, 81), indices tensor([2545, 524, 2319, ..., 2280, 398, 2622])\n", + "deriv_tensor (40, 81), indices tensor([ 779, 1670, 175, ..., 2419, 1408, 342])\n", + "deriv_tensor (40, 81), indices tensor([ 779, 1670, 175, ..., 2419, 1408, 342])\n", + "deriv_tensor (40, 81), indices tensor([ 779, 1670, 175, ..., 2419, 1408, 342])\n", + "deriv_tensor (40, 81), indices tensor([ 779, 1670, 175, ..., 2419, 1408, 342])\n", + "deriv_tensor (40, 81), indices tensor([2598, 3217, 1865, ..., 2015, 1759, 2493])\n", + "deriv_tensor (40, 81), indices tensor([2598, 3217, 1865, ..., 2015, 1759, 2493])\n", + "deriv_tensor (40, 81), indices tensor([2598, 
3217, 1865, ..., 2015, 1759, 2493])\n", + "deriv_tensor (40, 81), indices tensor([2598, 3217, 1865, ..., 2015, 1759, 2493])\n", + "deriv_tensor (40, 81), indices tensor([2436, 290, 306, ..., 2521, 992, 2523])\n", + "deriv_tensor (40, 81), indices tensor([2436, 290, 306, ..., 2521, 992, 2523])\n", + "deriv_tensor (40, 81), indices tensor([2436, 290, 306, ..., 2521, 992, 2523])\n", + "deriv_tensor (40, 81), indices tensor([2436, 290, 306, ..., 2521, 992, 2523])\n", + "deriv_tensor (40, 81), indices tensor([2576, 1307, 1659, ..., 2305, 2254, 1275])\n", + "deriv_tensor (40, 81), indices tensor([2576, 1307, 1659, ..., 2305, 2254, 1275])\n", + "deriv_tensor (40, 81), indices tensor([2576, 1307, 1659, ..., 2305, 2254, 1275])\n", + "deriv_tensor (40, 81), indices tensor([2576, 1307, 1659, ..., 2305, 2254, 1275])\n", + "deriv_tensor (40, 81), indices tensor([2745, 210, 2752, ..., 2368, 511, 2308])\n", + "deriv_tensor (40, 81), indices tensor([2745, 210, 2752, ..., 2368, 511, 2308])\n", + "deriv_tensor (40, 81), indices tensor([2745, 210, 2752, ..., 2368, 511, 2308])\n", + "deriv_tensor (40, 81), indices tensor([2745, 210, 2752, ..., 2368, 511, 2308])\n", + "deriv_tensor (40, 81), indices tensor([ 981, 653, 1794, ..., 3130, 412, 1225])\n", + "deriv_tensor (40, 81), indices tensor([ 981, 653, 1794, ..., 3130, 412, 1225])\n", + "deriv_tensor (40, 81), indices tensor([ 981, 653, 1794, ..., 3130, 412, 1225])\n", + "deriv_tensor (40, 81), indices tensor([ 981, 653, 1794, ..., 3130, 412, 1225])\n", + "deriv_tensor (40, 81), indices tensor([1393, 2881, 554, ..., 2390, 2710, 1166])\n", + "deriv_tensor (40, 81), indices tensor([1393, 2881, 554, ..., 2390, 2710, 1166])\n", + "deriv_tensor (40, 81), indices tensor([1393, 2881, 554, ..., 2390, 2710, 1166])\n", + "deriv_tensor (40, 81), indices tensor([1393, 2881, 554, ..., 2390, 2710, 1166])\n", + "deriv_tensor (40, 81), indices tensor([2662, 1684, 350, ..., 2941, 510, 527])\n", + "deriv_tensor (40, 81), indices tensor([2662, 1684, 350, ..., 2941, 510, 527])\n", + "deriv_tensor (40, 81), indices tensor([2662, 1684, 350, ..., 2941, 510, 527])\n", + "deriv_tensor (40, 81), indices tensor([2662, 1684, 350, ..., 2941, 510, 527])\n", + "deriv_tensor (40, 81), indices tensor([1518, 2424, 956, ..., 2878, 146, 603])\n", + "deriv_tensor (40, 81), indices tensor([1518, 2424, 956, ..., 2878, 146, 603])\n", + "deriv_tensor (40, 81), indices tensor([1518, 2424, 956, ..., 2878, 146, 603])\n", + "deriv_tensor (40, 81), indices tensor([1518, 2424, 956, ..., 2878, 146, 603])\n", + "deriv_tensor (40, 81), indices tensor([3030, 677, 1814, ..., 1496, 731, 2220])\n", + "deriv_tensor (40, 81), indices tensor([3030, 677, 1814, ..., 1496, 731, 2220])\n", + "deriv_tensor (40, 81), indices tensor([3030, 677, 1814, ..., 1496, 731, 2220])\n", + "deriv_tensor (40, 81), indices tensor([3030, 677, 1814, ..., 1496, 731, 2220])\n", + "deriv_tensor (40, 81), indices tensor([2845, 2009, 2815, ..., 1747, 2657, 285])\n", + "deriv_tensor (40, 81), indices tensor([2845, 2009, 2815, ..., 1747, 2657, 285])\n", + "deriv_tensor (40, 81), indices tensor([2845, 2009, 2815, ..., 1747, 2657, 285])\n", + "deriv_tensor (40, 81), indices tensor([2845, 2009, 2815, ..., 1747, 2657, 285])\n", + "deriv_tensor (40, 81), indices tensor([2228, 2157, 2161, ..., 893, 413, 61])\n", + "deriv_tensor (40, 81), indices tensor([2228, 2157, 2161, ..., 893, 413, 61])\n", + "deriv_tensor (40, 81), indices tensor([2228, 2157, 2161, ..., 893, 413, 61])\n", + "deriv_tensor (40, 81), indices tensor([2228, 2157, 2161, ..., 893, 413, 
61])\n", + "deriv_tensor (40, 81), indices tensor([ 99, 1819, 1390, ..., 493, 683, 2302])\n", + "deriv_tensor (40, 81), indices tensor([ 99, 1819, 1390, ..., 493, 683, 2302])\n", + "deriv_tensor (40, 81), indices tensor([ 99, 1819, 1390, ..., 493, 683, 2302])\n", + "deriv_tensor (40, 81), indices tensor([ 99, 1819, 1390, ..., 493, 683, 2302])\n", + "deriv_tensor (40, 81), indices tensor([ 540, 915, 133, ..., 2423, 2594, 2385])\n", + "deriv_tensor (40, 81), indices tensor([ 540, 915, 133, ..., 2423, 2594, 2385])\n", + "deriv_tensor (40, 81), indices tensor([ 540, 915, 133, ..., 2423, 2594, 2385])\n", + "deriv_tensor (40, 81), indices tensor([ 540, 915, 133, ..., 2423, 2594, 2385])\n", + "deriv_tensor (40, 81), indices tensor([2192, 182, 1484, ..., 1227, 3123, 303])\n", + "deriv_tensor (40, 81), indices tensor([2192, 182, 1484, ..., 1227, 3123, 303])\n", + "deriv_tensor (40, 81), indices tensor([2192, 182, 1484, ..., 1227, 3123, 303])\n", + "deriv_tensor (40, 81), indices tensor([2192, 182, 1484, ..., 1227, 3123, 303])\n", + "deriv_tensor (40, 81), indices tensor([1799, 739, 2594, ..., 985, 920, 1199])\n", + "deriv_tensor (40, 81), indices tensor([1799, 739, 2594, ..., 985, 920, 1199])\n", + "deriv_tensor (40, 81), indices tensor([1799, 739, 2594, ..., 985, 920, 1199])\n", + "deriv_tensor (40, 81), indices tensor([1799, 739, 2594, ..., 985, 920, 1199])\n", + "deriv_tensor (40, 81), indices tensor([ 704, 2086, 1797, ..., 1480, 442, 994])\n", + "deriv_tensor (40, 81), indices tensor([ 704, 2086, 1797, ..., 1480, 442, 994])\n", + "deriv_tensor (40, 81), indices tensor([ 704, 2086, 1797, ..., 1480, 442, 994])\n", + "deriv_tensor (40, 81), indices tensor([ 704, 2086, 1797, ..., 1480, 442, 994])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2739, 1966, ..., 1376, 221, 3167])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2739, 1966, ..., 1376, 221, 3167])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2739, 1966, ..., 1376, 221, 3167])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2739, 1966, ..., 1376, 221, 3167])\n", + "deriv_tensor (40, 81), indices tensor([ 492, 241, 2510, ..., 2560, 1060, 47])\n", + "deriv_tensor (40, 81), indices tensor([ 492, 241, 2510, ..., 2560, 1060, 47])\n", + "deriv_tensor (40, 81), indices tensor([ 492, 241, 2510, ..., 2560, 1060, 47])\n", + "deriv_tensor (40, 81), indices tensor([ 492, 241, 2510, ..., 2560, 1060, 47])\n", + "deriv_tensor (40, 81), indices tensor([1570, 2460, 769, ..., 450, 2709, 1040])\n", + "deriv_tensor (40, 81), indices tensor([1570, 2460, 769, ..., 450, 2709, 1040])\n", + "deriv_tensor (40, 81), indices tensor([1570, 2460, 769, ..., 450, 2709, 1040])\n", + "deriv_tensor (40, 81), indices tensor([1570, 2460, 769, ..., 450, 2709, 1040])\n", + "deriv_tensor (40, 81), indices tensor([ 531, 644, 2804, ..., 2647, 1927, 273])\n", + "deriv_tensor (40, 81), indices tensor([ 531, 644, 2804, ..., 2647, 1927, 273])\n", + "deriv_tensor (40, 81), indices tensor([ 531, 644, 2804, ..., 2647, 1927, 273])\n", + "deriv_tensor (40, 81), indices tensor([ 531, 644, 2804, ..., 2647, 1927, 273])\n", + "deriv_tensor (40, 81), indices tensor([3069, 294, 3170, ..., 2760, 34, 2077])\n", + "deriv_tensor (40, 81), indices tensor([3069, 294, 3170, ..., 2760, 34, 2077])\n", + "deriv_tensor (40, 81), indices tensor([3069, 294, 3170, ..., 2760, 34, 2077])\n", + "deriv_tensor (40, 81), indices tensor([3069, 294, 3170, ..., 2760, 34, 2077])\n", + "deriv_tensor (40, 81), indices tensor([ 605, 1930, 3107, ..., 1588, 2285, 1374])\n", + "deriv_tensor (40, 81), 
indices tensor([ 605, 1930, 3107, ..., 1588, 2285, 1374])\n", + "deriv_tensor (40, 81), indices tensor([ 605, 1930, 3107, ..., 1588, 2285, 1374])\n", + "deriv_tensor (40, 81), indices tensor([ 605, 1930, 3107, ..., 1588, 2285, 1374])\n", + "deriv_tensor (40, 81), indices tensor([ 757, 30, 1694, ..., 2370, 1919, 602])\n", + "deriv_tensor (40, 81), indices tensor([ 757, 30, 1694, ..., 2370, 1919, 602])\n", + "deriv_tensor (40, 81), indices tensor([ 757, 30, 1694, ..., 2370, 1919, 602])\n", + "deriv_tensor (40, 81), indices tensor([ 757, 30, 1694, ..., 2370, 1919, 602])\n", + "deriv_tensor (40, 81), indices tensor([2525, 1451, 60, ..., 2149, 2697, 2923])\n", + "deriv_tensor (40, 81), indices tensor([2525, 1451, 60, ..., 2149, 2697, 2923])\n", + "deriv_tensor (40, 81), indices tensor([2525, 1451, 60, ..., 2149, 2697, 2923])\n", + "deriv_tensor (40, 81), indices tensor([2525, 1451, 60, ..., 2149, 2697, 2923])\n", + "deriv_tensor (40, 81), indices tensor([ 977, 621, 482, ..., 2903, 142, 1027])\n", + "deriv_tensor (40, 81), indices tensor([ 977, 621, 482, ..., 2903, 142, 1027])\n", + "deriv_tensor (40, 81), indices tensor([ 977, 621, 482, ..., 2903, 142, 1027])\n", + "deriv_tensor (40, 81), indices tensor([ 977, 621, 482, ..., 2903, 142, 1027])\n", + "deriv_tensor (40, 81), indices tensor([1367, 2416, 1714, ..., 2186, 784, 2880])\n", + "deriv_tensor (40, 81), indices tensor([1367, 2416, 1714, ..., 2186, 784, 2880])\n", + "deriv_tensor (40, 81), indices tensor([1367, 2416, 1714, ..., 2186, 784, 2880])\n", + "deriv_tensor (40, 81), indices tensor([1367, 2416, 1714, ..., 2186, 784, 2880])\n", + "deriv_tensor (40, 81), indices tensor([2193, 1948, 222, ..., 2469, 2622, 2657])\n", + "deriv_tensor (40, 81), indices tensor([2193, 1948, 222, ..., 2469, 2622, 2657])\n", + "deriv_tensor (40, 81), indices tensor([2193, 1948, 222, ..., 2469, 2622, 2657])\n", + "deriv_tensor (40, 81), indices tensor([2193, 1948, 222, ..., 2469, 2622, 2657])\n", + "deriv_tensor (40, 81), indices tensor([2810, 678, 1497, ..., 2728, 2850, 1280])\n", + "deriv_tensor (40, 81), indices tensor([2810, 678, 1497, ..., 2728, 2850, 1280])\n", + "deriv_tensor (40, 81), indices tensor([2810, 678, 1497, ..., 2728, 2850, 1280])\n", + "deriv_tensor (40, 81), indices tensor([2810, 678, 1497, ..., 2728, 2850, 1280])\n", + "deriv_tensor (40, 81), indices tensor([1612, 1303, 1166, ..., 2968, 38, 2064])\n", + "deriv_tensor (40, 81), indices tensor([1612, 1303, 1166, ..., 2968, 38, 2064])\n", + "deriv_tensor (40, 81), indices tensor([1612, 1303, 1166, ..., 2968, 38, 2064])\n", + "deriv_tensor (40, 81), indices tensor([1612, 1303, 1166, ..., 2968, 38, 2064])\n", + "deriv_tensor (40, 81), indices tensor([2983, 630, 2829, ..., 1725, 1071, 225])\n", + "deriv_tensor (40, 81), indices tensor([2983, 630, 2829, ..., 1725, 1071, 225])\n", + "deriv_tensor (40, 81), indices tensor([2983, 630, 2829, ..., 1725, 1071, 225])\n", + "deriv_tensor (40, 81), indices tensor([2983, 630, 2829, ..., 1725, 1071, 225])\n", + "deriv_tensor (40, 81), indices tensor([ 414, 1239, 1354, ..., 93, 1094, 1658])\n", + "deriv_tensor (40, 81), indices tensor([ 414, 1239, 1354, ..., 93, 1094, 1658])\n", + "deriv_tensor (40, 81), indices tensor([ 414, 1239, 1354, ..., 93, 1094, 1658])\n", + "deriv_tensor (40, 81), indices tensor([ 414, 1239, 1354, ..., 93, 1094, 1658])\n", + "deriv_tensor (40, 81), indices tensor([ 470, 1972, 120, ..., 2030, 328, 709])\n", + "deriv_tensor (40, 81), indices tensor([ 470, 1972, 120, ..., 2030, 328, 709])\n", + "deriv_tensor (40, 81), indices tensor([ 
470, 1972, 120, ..., 2030, 328, 709])\n", + "deriv_tensor (40, 81), indices tensor([ 470, 1972, 120, ..., 2030, 328, 709])\n", + "deriv_tensor (40, 81), indices tensor([ 905, 1554, 397, ..., 406, 252, 2189])\n", + "deriv_tensor (40, 81), indices tensor([ 905, 1554, 397, ..., 406, 252, 2189])\n", + "deriv_tensor (40, 81), indices tensor([ 905, 1554, 397, ..., 406, 252, 2189])\n", + "deriv_tensor (40, 81), indices tensor([ 905, 1554, 397, ..., 406, 252, 2189])\n", + "deriv_tensor (40, 81), indices tensor([2283, 3200, 581, ..., 2447, 1809, 2424])\n", + "deriv_tensor (40, 81), indices tensor([2283, 3200, 581, ..., 2447, 1809, 2424])\n", + "deriv_tensor (40, 81), indices tensor([2283, 3200, 581, ..., 2447, 1809, 2424])\n", + "deriv_tensor (40, 81), indices tensor([2283, 3200, 581, ..., 2447, 1809, 2424])\n", + "deriv_tensor (40, 81), indices tensor([2070, 564, 215, ..., 660, 159, 1964])\n", + "deriv_tensor (40, 81), indices tensor([2070, 564, 215, ..., 660, 159, 1964])\n", + "deriv_tensor (40, 81), indices tensor([2070, 564, 215, ..., 660, 159, 1964])\n", + "deriv_tensor (40, 81), indices tensor([2070, 564, 215, ..., 660, 159, 1964])\n", + "deriv_tensor (40, 81), indices tensor([2568, 2059, 1229, ..., 2003, 2181, 1093])\n", + "deriv_tensor (40, 81), indices tensor([2568, 2059, 1229, ..., 2003, 2181, 1093])\n", + "deriv_tensor (40, 81), indices tensor([2568, 2059, 1229, ..., 2003, 2181, 1093])\n", + "deriv_tensor (40, 81), indices tensor([2568, 2059, 1229, ..., 2003, 2181, 1093])\n", + "deriv_tensor (40, 81), indices tensor([ 128, 49, 781, ..., 1550, 3055, 454])\n", + "deriv_tensor (40, 81), indices tensor([ 128, 49, 781, ..., 1550, 3055, 454])\n", + "deriv_tensor (40, 81), indices tensor([ 128, 49, 781, ..., 1550, 3055, 454])\n", + "deriv_tensor (40, 81), indices tensor([ 128, 49, 781, ..., 1550, 3055, 454])\n", + "deriv_tensor (40, 81), indices tensor([ 931, 250, 131, ..., 463, 1574, 586])\n", + "deriv_tensor (40, 81), indices tensor([ 931, 250, 131, ..., 463, 1574, 586])\n", + "deriv_tensor (40, 81), indices tensor([ 931, 250, 131, ..., 463, 1574, 586])\n", + "deriv_tensor (40, 81), indices tensor([ 931, 250, 131, ..., 463, 1574, 586])\n", + "deriv_tensor (40, 81), indices tensor([2644, 471, 1895, ..., 136, 2892, 563])\n", + "deriv_tensor (40, 81), indices tensor([2644, 471, 1895, ..., 136, 2892, 563])\n", + "deriv_tensor (40, 81), indices tensor([2644, 471, 1895, ..., 136, 2892, 563])\n", + "deriv_tensor (40, 81), indices tensor([2644, 471, 1895, ..., 136, 2892, 563])\n", + "deriv_tensor (40, 81), indices tensor([1084, 2608, 3129, ..., 2941, 1472, 2290])\n", + "deriv_tensor (40, 81), indices tensor([1084, 2608, 3129, ..., 2941, 1472, 2290])\n", + "deriv_tensor (40, 81), indices tensor([1084, 2608, 3129, ..., 2941, 1472, 2290])\n", + "deriv_tensor (40, 81), indices tensor([1084, 2608, 3129, ..., 2941, 1472, 2290])\n", + "deriv_tensor (40, 81), indices tensor([1595, 731, 1068, ..., 31, 1605, 721])\n", + "deriv_tensor (40, 81), indices tensor([1595, 731, 1068, ..., 31, 1605, 721])\n", + "deriv_tensor (40, 81), indices tensor([1595, 731, 1068, ..., 31, 1605, 721])\n", + "deriv_tensor (40, 81), indices tensor([1595, 731, 1068, ..., 31, 1605, 721])\n", + "deriv_tensor (40, 81), indices tensor([1982, 1936, 2608, ..., 436, 2413, 2163])\n", + "deriv_tensor (40, 81), indices tensor([1982, 1936, 2608, ..., 436, 2413, 2163])\n", + "deriv_tensor (40, 81), indices tensor([1982, 1936, 2608, ..., 436, 2413, 2163])\n", + "deriv_tensor (40, 81), indices tensor([1982, 1936, 2608, ..., 436, 2413, 2163])\n", + 
"deriv_tensor (40, 81), indices tensor([ 335, 220, 722, ..., 1583, 2457, 1795])\n", + "deriv_tensor (40, 81), indices tensor([ 335, 220, 722, ..., 1583, 2457, 1795])\n", + "deriv_tensor (40, 81), indices tensor([ 335, 220, 722, ..., 1583, 2457, 1795])\n", + "deriv_tensor (40, 81), indices tensor([ 335, 220, 722, ..., 1583, 2457, 1795])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2156, 16, ..., 56, 2643, 777])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2156, 16, ..., 56, 2643, 777])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2156, 16, ..., 56, 2643, 777])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2156, 16, ..., 56, 2643, 777])\n", + "deriv_tensor (40, 81), indices tensor([1103, 1296, 2244, ..., 2704, 2414, 986])\n", + "deriv_tensor (40, 81), indices tensor([1103, 1296, 2244, ..., 2704, 2414, 986])\n", + "deriv_tensor (40, 81), indices tensor([1103, 1296, 2244, ..., 2704, 2414, 986])\n", + "deriv_tensor (40, 81), indices tensor([1103, 1296, 2244, ..., 2704, 2414, 986])\n", + "deriv_tensor (40, 81), indices tensor([ 104, 2039, 2700, ..., 1969, 91, 2037])\n", + "deriv_tensor (40, 81), indices tensor([ 104, 2039, 2700, ..., 1969, 91, 2037])\n", + "deriv_tensor (40, 81), indices tensor([ 104, 2039, 2700, ..., 1969, 91, 2037])\n", + "deriv_tensor (40, 81), indices tensor([ 104, 2039, 2700, ..., 1969, 91, 2037])\n", + "deriv_tensor (40, 81), indices tensor([3152, 2776, 1311, ..., 1111, 132, 225])\n", + "deriv_tensor (40, 81), indices tensor([3152, 2776, 1311, ..., 1111, 132, 225])\n", + "deriv_tensor (40, 81), indices tensor([3152, 2776, 1311, ..., 1111, 132, 225])\n", + "deriv_tensor (40, 81), indices tensor([3152, 2776, 1311, ..., 1111, 132, 225])\n", + "deriv_tensor (40, 81), indices tensor([ 296, 1005, 528, ..., 1110, 561, 2183])\n", + "deriv_tensor (40, 81), indices tensor([ 296, 1005, 528, ..., 1110, 561, 2183])\n", + "deriv_tensor (40, 81), indices tensor([ 296, 1005, 528, ..., 1110, 561, 2183])\n", + "deriv_tensor (40, 81), indices tensor([ 296, 1005, 528, ..., 1110, 561, 2183])\n", + "deriv_tensor (40, 81), indices tensor([1319, 2324, 2659, ..., 295, 601, 2367])\n", + "deriv_tensor (40, 81), indices tensor([1319, 2324, 2659, ..., 295, 601, 2367])\n", + "deriv_tensor (40, 81), indices tensor([1319, 2324, 2659, ..., 295, 601, 2367])\n", + "deriv_tensor (40, 81), indices tensor([1319, 2324, 2659, ..., 295, 601, 2367])\n", + "deriv_tensor (40, 81), indices tensor([2951, 631, 577, ..., 300, 3129, 985])\n", + "deriv_tensor (40, 81), indices tensor([2951, 631, 577, ..., 300, 3129, 985])\n", + "deriv_tensor (40, 81), indices tensor([2951, 631, 577, ..., 300, 3129, 985])\n", + "deriv_tensor (40, 81), indices tensor([2951, 631, 577, ..., 300, 3129, 985])\n", + "deriv_tensor (40, 81), indices tensor([1788, 2419, 947, ..., 2723, 3206, 473])\n", + "deriv_tensor (40, 81), indices tensor([1788, 2419, 947, ..., 2723, 3206, 473])\n", + "deriv_tensor (40, 81), indices tensor([1788, 2419, 947, ..., 2723, 3206, 473])\n", + "deriv_tensor (40, 81), indices tensor([1788, 2419, 947, ..., 2723, 3206, 473])\n", + "deriv_tensor (40, 81), indices tensor([3191, 814, 348, ..., 2078, 974, 30])\n", + "deriv_tensor (40, 81), indices tensor([3191, 814, 348, ..., 2078, 974, 30])\n", + "deriv_tensor (40, 81), indices tensor([3191, 814, 348, ..., 2078, 974, 30])\n", + "deriv_tensor (40, 81), indices tensor([3191, 814, 348, ..., 2078, 974, 30])\n", + "deriv_tensor (40, 81), indices tensor([ 204, 2842, 2459, ..., 1413, 2706, 479])\n", + "deriv_tensor (40, 81), indices tensor([ 204, 2842, 
2459, ..., 1413, 2706, 479])\n", + "deriv_tensor (40, 81), indices tensor([ 204, 2842, 2459, ..., 1413, 2706, 479])\n", + "deriv_tensor (40, 81), indices tensor([ 204, 2842, 2459, ..., 1413, 2706, 479])\n", + "deriv_tensor (40, 81), indices tensor([ 467, 1857, 1039, ..., 41, 1881, 3028])\n", + "deriv_tensor (40, 81), indices tensor([ 467, 1857, 1039, ..., 41, 1881, 3028])\n", + "deriv_tensor (40, 81), indices tensor([ 467, 1857, 1039, ..., 41, 1881, 3028])\n", + "deriv_tensor (40, 81), indices tensor([ 467, 1857, 1039, ..., 41, 1881, 3028])\n", + "deriv_tensor (40, 81), indices tensor([2924, 952, 356, ..., 394, 872, 3110])\n", + "deriv_tensor (40, 81), indices tensor([2924, 952, 356, ..., 394, 872, 3110])\n", + "deriv_tensor (40, 81), indices tensor([2924, 952, 356, ..., 394, 872, 3110])\n", + "deriv_tensor (40, 81), indices tensor([2924, 952, 356, ..., 394, 872, 3110])\n", + "deriv_tensor (40, 81), indices tensor([1687, 2733, 322, ..., 1291, 1001, 2153])\n", + "deriv_tensor (40, 81), indices tensor([1687, 2733, 322, ..., 1291, 1001, 2153])\n", + "deriv_tensor (40, 81), indices tensor([1687, 2733, 322, ..., 1291, 1001, 2153])\n", + "deriv_tensor (40, 81), indices tensor([1687, 2733, 322, ..., 1291, 1001, 2153])\n", + "deriv_tensor (40, 81), indices tensor([1975, 1212, 2125, ..., 693, 491, 2707])\n", + "deriv_tensor (40, 81), indices tensor([1975, 1212, 2125, ..., 693, 491, 2707])\n", + "deriv_tensor (40, 81), indices tensor([1975, 1212, 2125, ..., 693, 491, 2707])\n", + "deriv_tensor (40, 81), indices tensor([1975, 1212, 2125, ..., 693, 491, 2707])\n", + "deriv_tensor (40, 81), indices tensor([1304, 936, 2179, ..., 995, 2130, 853])\n", + "deriv_tensor (40, 81), indices tensor([1304, 936, 2179, ..., 995, 2130, 853])\n", + "deriv_tensor (40, 81), indices tensor([1304, 936, 2179, ..., 995, 2130, 853])\n", + "deriv_tensor (40, 81), indices tensor([1304, 936, 2179, ..., 995, 2130, 853])\n", + "deriv_tensor (40, 81), indices tensor([2468, 472, 685, ..., 7, 3116, 3173])\n", + "deriv_tensor (40, 81), indices tensor([2468, 472, 685, ..., 7, 3116, 3173])\n", + "deriv_tensor (40, 81), indices tensor([2468, 472, 685, ..., 7, 3116, 3173])\n", + "deriv_tensor (40, 81), indices tensor([2468, 472, 685, ..., 7, 3116, 3173])\n", + "deriv_tensor (40, 81), indices tensor([1563, 561, 2235, ..., 603, 1005, 1])\n", + "deriv_tensor (40, 81), indices tensor([1563, 561, 2235, ..., 603, 1005, 1])\n", + "deriv_tensor (40, 81), indices tensor([1563, 561, 2235, ..., 603, 1005, 1])\n", + "deriv_tensor (40, 81), indices tensor([1563, 561, 2235, ..., 603, 1005, 1])\n", + "deriv_tensor (40, 81), indices tensor([ 742, 2680, 1940, ..., 1084, 593, 483])\n", + "deriv_tensor (40, 81), indices tensor([ 742, 2680, 1940, ..., 1084, 593, 483])\n", + "deriv_tensor (40, 81), indices tensor([ 742, 2680, 1940, ..., 1084, 593, 483])\n", + "deriv_tensor (40, 81), indices tensor([ 742, 2680, 1940, ..., 1084, 593, 483])\n", + "deriv_tensor (40, 81), indices tensor([1499, 1133, 33, ..., 2607, 2759, 746])\n", + "deriv_tensor (40, 81), indices tensor([1499, 1133, 33, ..., 2607, 2759, 746])\n", + "deriv_tensor (40, 81), indices tensor([1499, 1133, 33, ..., 2607, 2759, 746])\n", + "deriv_tensor (40, 81), indices tensor([1499, 1133, 33, ..., 2607, 2759, 746])\n", + "deriv_tensor (40, 81), indices tensor([ 637, 1495, 85, ..., 2820, 2828, 2033])\n", + "deriv_tensor (40, 81), indices tensor([ 637, 1495, 85, ..., 2820, 2828, 2033])\n", + "deriv_tensor (40, 81), indices tensor([ 637, 1495, 85, ..., 2820, 2828, 2033])\n", + "deriv_tensor (40, 81), 
indices tensor([ 637, 1495, 85, ..., 2820, 2828, 2033])\n", + "deriv_tensor (40, 81), indices tensor([3167, 1235, 2885, ..., 314, 16, 829])\n", + "deriv_tensor (40, 81), indices tensor([3167, 1235, 2885, ..., 314, 16, 829])\n", + "deriv_tensor (40, 81), indices tensor([3167, 1235, 2885, ..., 314, 16, 829])\n", + "deriv_tensor (40, 81), indices tensor([3167, 1235, 2885, ..., 314, 16, 829])\n", + "deriv_tensor (40, 81), indices tensor([ 251, 2872, 1150, ..., 933, 2616, 361])\n", + "deriv_tensor (40, 81), indices tensor([ 251, 2872, 1150, ..., 933, 2616, 361])\n", + "deriv_tensor (40, 81), indices tensor([ 251, 2872, 1150, ..., 933, 2616, 361])\n", + "deriv_tensor (40, 81), indices tensor([ 251, 2872, 1150, ..., 933, 2616, 361])\n", + "deriv_tensor (40, 81), indices tensor([2732, 215, 1425, ..., 1701, 1283, 2460])\n", + "deriv_tensor (40, 81), indices tensor([2732, 215, 1425, ..., 1701, 1283, 2460])\n", + "deriv_tensor (40, 81), indices tensor([2732, 215, 1425, ..., 1701, 1283, 2460])\n", + "deriv_tensor (40, 81), indices tensor([2732, 215, 1425, ..., 1701, 1283, 2460])\n", + "deriv_tensor (40, 81), indices tensor([ 5, 1708, 2000, ..., 3127, 448, 1020])\n", + "deriv_tensor (40, 81), indices tensor([ 5, 1708, 2000, ..., 3127, 448, 1020])\n", + "deriv_tensor (40, 81), indices tensor([ 5, 1708, 2000, ..., 3127, 448, 1020])\n", + "deriv_tensor (40, 81), indices tensor([ 5, 1708, 2000, ..., 3127, 448, 1020])\n", + "deriv_tensor (40, 81), indices tensor([ 676, 1044, 2600, ..., 378, 1617, 1225])\n", + "deriv_tensor (40, 81), indices tensor([ 676, 1044, 2600, ..., 378, 1617, 1225])\n", + "deriv_tensor (40, 81), indices tensor([ 676, 1044, 2600, ..., 378, 1617, 1225])\n", + "deriv_tensor (40, 81), indices tensor([ 676, 1044, 2600, ..., 378, 1617, 1225])\n", + "deriv_tensor (40, 81), indices tensor([2750, 921, 2887, ..., 80, 2158, 2966])\n", + "deriv_tensor (40, 81), indices tensor([2750, 921, 2887, ..., 80, 2158, 2966])\n", + "deriv_tensor (40, 81), indices tensor([2750, 921, 2887, ..., 80, 2158, 2966])\n", + "deriv_tensor (40, 81), indices tensor([2750, 921, 2887, ..., 80, 2158, 2966])\n", + "deriv_tensor (40, 81), indices tensor([2520, 558, 1091, ..., 489, 193, 2035])\n", + "deriv_tensor (40, 81), indices tensor([2520, 558, 1091, ..., 489, 193, 2035])\n", + "deriv_tensor (40, 81), indices tensor([2520, 558, 1091, ..., 489, 193, 2035])\n", + "deriv_tensor (40, 81), indices tensor([2520, 558, 1091, ..., 489, 193, 2035])\n", + "deriv_tensor (40, 81), indices tensor([1809, 12, 2095, ..., 732, 2344, 884])\n", + "deriv_tensor (40, 81), indices tensor([1809, 12, 2095, ..., 732, 2344, 884])\n", + "deriv_tensor (40, 81), indices tensor([1809, 12, 2095, ..., 732, 2344, 884])\n", + "deriv_tensor (40, 81), indices tensor([1809, 12, 2095, ..., 732, 2344, 884])\n", + "deriv_tensor (40, 81), indices tensor([ 800, 1749, 2127, ..., 2544, 741, 2213])\n", + "deriv_tensor (40, 81), indices tensor([ 800, 1749, 2127, ..., 2544, 741, 2213])\n", + "deriv_tensor (40, 81), indices tensor([ 800, 1749, 2127, ..., 2544, 741, 2213])\n", + "deriv_tensor (40, 81), indices tensor([ 800, 1749, 2127, ..., 2544, 741, 2213])\n", + "deriv_tensor (40, 81), indices tensor([2662, 3113, 3147, ..., 2624, 822, 3052])\n", + "deriv_tensor (40, 81), indices tensor([2662, 3113, 3147, ..., 2624, 822, 3052])\n", + "deriv_tensor (40, 81), indices tensor([2662, 3113, 3147, ..., 2624, 822, 3052])\n", + "deriv_tensor (40, 81), indices tensor([2662, 3113, 3147, ..., 2624, 822, 3052])\n", + "deriv_tensor (40, 81), indices tensor([ 604, 1507, 866, ..., 
545, 2530, 1776])\n", + "deriv_tensor (40, 81), indices tensor([ 604, 1507, 866, ..., 545, 2530, 1776])\n", + "deriv_tensor (40, 81), indices tensor([ 604, 1507, 866, ..., 545, 2530, 1776])\n", + "deriv_tensor (40, 81), indices tensor([ 604, 1507, 866, ..., 545, 2530, 1776])\n", + "deriv_tensor (40, 81), indices tensor([ 815, 902, 1157, ..., 1612, 1829, 1668])\n", + "deriv_tensor (40, 81), indices tensor([ 815, 902, 1157, ..., 1612, 1829, 1668])\n", + "deriv_tensor (40, 81), indices tensor([ 815, 902, 1157, ..., 1612, 1829, 1668])\n", + "deriv_tensor (40, 81), indices tensor([ 815, 902, 1157, ..., 1612, 1829, 1668])\n", + "deriv_tensor (40, 81), indices tensor([3003, 2978, 1484, ..., 3150, 1755, 875])\n", + "deriv_tensor (40, 81), indices tensor([3003, 2978, 1484, ..., 3150, 1755, 875])\n", + "deriv_tensor (40, 81), indices tensor([3003, 2978, 1484, ..., 3150, 1755, 875])\n", + "deriv_tensor (40, 81), indices tensor([3003, 2978, 1484, ..., 3150, 1755, 875])\n", + "deriv_tensor (40, 81), indices tensor([1837, 388, 1873, ..., 1237, 381, 2926])\n", + "deriv_tensor (40, 81), indices tensor([1837, 388, 1873, ..., 1237, 381, 2926])\n", + "deriv_tensor (40, 81), indices tensor([1837, 388, 1873, ..., 1237, 381, 2926])\n", + "deriv_tensor (40, 81), indices tensor([1837, 388, 1873, ..., 1237, 381, 2926])\n", + "deriv_tensor (40, 81), indices tensor([1644, 847, 1484, ..., 872, 1977, 354])\n", + "deriv_tensor (40, 81), indices tensor([1644, 847, 1484, ..., 872, 1977, 354])\n", + "deriv_tensor (40, 81), indices tensor([1644, 847, 1484, ..., 872, 1977, 354])\n", + "deriv_tensor (40, 81), indices tensor([1644, 847, 1484, ..., 872, 1977, 354])\n", + "deriv_tensor (40, 81), indices tensor([2177, 2200, 2318, ..., 447, 50, 1307])\n", + "deriv_tensor (40, 81), indices tensor([2177, 2200, 2318, ..., 447, 50, 1307])\n", + "deriv_tensor (40, 81), indices tensor([2177, 2200, 2318, ..., 447, 50, 1307])\n", + "deriv_tensor (40, 81), indices tensor([2177, 2200, 2318, ..., 447, 50, 1307])\n", + "deriv_tensor (40, 81), indices tensor([1440, 2523, 2577, ..., 1112, 2564, 2235])\n", + "deriv_tensor (40, 81), indices tensor([1440, 2523, 2577, ..., 1112, 2564, 2235])\n", + "deriv_tensor (40, 81), indices tensor([1440, 2523, 2577, ..., 1112, 2564, 2235])\n", + "deriv_tensor (40, 81), indices tensor([1440, 2523, 2577, ..., 1112, 2564, 2235])\n", + "deriv_tensor (40, 81), indices tensor([1239, 2239, 2038, ..., 1089, 2520, 871])\n", + "deriv_tensor (40, 81), indices tensor([1239, 2239, 2038, ..., 1089, 2520, 871])\n", + "deriv_tensor (40, 81), indices tensor([1239, 2239, 2038, ..., 1089, 2520, 871])\n", + "deriv_tensor (40, 81), indices tensor([1239, 2239, 2038, ..., 1089, 2520, 871])\n", + "deriv_tensor (40, 81), indices tensor([2277, 895, 3102, ..., 2704, 1034, 230])\n", + "deriv_tensor (40, 81), indices tensor([2277, 895, 3102, ..., 2704, 1034, 230])\n", + "deriv_tensor (40, 81), indices tensor([2277, 895, 3102, ..., 2704, 1034, 230])\n", + "deriv_tensor (40, 81), indices tensor([2277, 895, 3102, ..., 2704, 1034, 230])\n", + "deriv_tensor (40, 81), indices tensor([2221, 2851, 1203, ..., 1446, 1167, 1366])\n", + "deriv_tensor (40, 81), indices tensor([2221, 2851, 1203, ..., 1446, 1167, 1366])\n", + "deriv_tensor (40, 81), indices tensor([2221, 2851, 1203, ..., 1446, 1167, 1366])\n", + "deriv_tensor (40, 81), indices tensor([2221, 2851, 1203, ..., 1446, 1167, 1366])\n", + "deriv_tensor (40, 81), indices tensor([1502, 1946, 1217, ..., 2574, 378, 2383])\n", + "deriv_tensor (40, 81), indices tensor([1502, 1946, 1217, ..., 2574, 
378, 2383])\n", + "deriv_tensor (40, 81), indices tensor([1502, 1946, 1217, ..., 2574, 378, 2383])\n", + "deriv_tensor (40, 81), indices tensor([1502, 1946, 1217, ..., 2574, 378, 2383])\n", + "deriv_tensor (40, 81), indices tensor([ 668, 1121, 1860, ..., 2598, 580, 887])\n", + "deriv_tensor (40, 81), indices tensor([ 668, 1121, 1860, ..., 2598, 580, 887])\n", + "deriv_tensor (40, 81), indices tensor([ 668, 1121, 1860, ..., 2598, 580, 887])\n", + "deriv_tensor (40, 81), indices tensor([ 668, 1121, 1860, ..., 2598, 580, 887])\n", + "deriv_tensor (40, 81), indices tensor([1891, 1746, 1000, ..., 1871, 1084, 684])\n", + "deriv_tensor (40, 81), indices tensor([1891, 1746, 1000, ..., 1871, 1084, 684])\n", + "deriv_tensor (40, 81), indices tensor([1891, 1746, 1000, ..., 1871, 1084, 684])\n", + "deriv_tensor (40, 81), indices tensor([1891, 1746, 1000, ..., 1871, 1084, 684])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2926, 1090, ..., 1842, 2575, 2082])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2926, 1090, ..., 1842, 2575, 2082])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2926, 1090, ..., 1842, 2575, 2082])\n", + "deriv_tensor (40, 81), indices tensor([2788, 2926, 1090, ..., 1842, 2575, 2082])\n", + "deriv_tensor (40, 81), indices tensor([ 230, 2772, 2774, ..., 448, 3029, 613])\n", + "deriv_tensor (40, 81), indices tensor([ 230, 2772, 2774, ..., 448, 3029, 613])\n", + "deriv_tensor (40, 81), indices tensor([ 230, 2772, 2774, ..., 448, 3029, 613])\n", + "deriv_tensor (40, 81), indices tensor([ 230, 2772, 2774, ..., 448, 3029, 613])\n", + "deriv_tensor (40, 81), indices tensor([1975, 1946, 570, ..., 1209, 353, 1865])\n", + "deriv_tensor (40, 81), indices tensor([1975, 1946, 570, ..., 1209, 353, 1865])\n", + "deriv_tensor (40, 81), indices tensor([1975, 1946, 570, ..., 1209, 353, 1865])\n", + "deriv_tensor (40, 81), indices tensor([1975, 1946, 570, ..., 1209, 353, 1865])\n", + "deriv_tensor (40, 81), indices tensor([ 237, 17, 1549, ..., 2997, 341, 1050])\n", + "deriv_tensor (40, 81), indices tensor([ 237, 17, 1549, ..., 2997, 341, 1050])\n", + "deriv_tensor (40, 81), indices tensor([ 237, 17, 1549, ..., 2997, 341, 1050])\n", + "deriv_tensor (40, 81), indices tensor([ 237, 17, 1549, ..., 2997, 341, 1050])\n", + "deriv_tensor (40, 81), indices tensor([1690, 2019, 1130, ..., 733, 665, 3083])\n", + "deriv_tensor (40, 81), indices tensor([1690, 2019, 1130, ..., 733, 665, 3083])\n", + "deriv_tensor (40, 81), indices tensor([1690, 2019, 1130, ..., 733, 665, 3083])\n", + "deriv_tensor (40, 81), indices tensor([1690, 2019, 1130, ..., 733, 665, 3083])\n", + "deriv_tensor (40, 81), indices tensor([2865, 2278, 2163, ..., 1455, 2825, 347])\n", + "deriv_tensor (40, 81), indices tensor([2865, 2278, 2163, ..., 1455, 2825, 347])\n", + "deriv_tensor (40, 81), indices tensor([2865, 2278, 2163, ..., 1455, 2825, 347])\n", + "deriv_tensor (40, 81), indices tensor([2865, 2278, 2163, ..., 1455, 2825, 347])\n", + "deriv_tensor (40, 81), indices tensor([2067, 129, 133, ..., 3147, 3199, 2150])\n", + "deriv_tensor (40, 81), indices tensor([2067, 129, 133, ..., 3147, 3199, 2150])\n", + "deriv_tensor (40, 81), indices tensor([2067, 129, 133, ..., 3147, 3199, 2150])\n", + "deriv_tensor (40, 81), indices tensor([2067, 129, 133, ..., 3147, 3199, 2150])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 293, 1281, ..., 1126, 1601, 2246])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 293, 1281, ..., 1126, 1601, 2246])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 293, 1281, ..., 1126, 1601, 
2246])\n", + "deriv_tensor (40, 81), indices tensor([ 909, 293, 1281, ..., 1126, 1601, 2246])\n", + "deriv_tensor (40, 81), indices tensor([2720, 2615, 537, ..., 782, 2635, 2680])\n", + "deriv_tensor (40, 81), indices tensor([2720, 2615, 537, ..., 782, 2635, 2680])\n", + "deriv_tensor (40, 81), indices tensor([2720, 2615, 537, ..., 782, 2635, 2680])\n", + "deriv_tensor (40, 81), indices tensor([2720, 2615, 537, ..., 782, 2635, 2680])\n", + "deriv_tensor (40, 81), indices tensor([2107, 1813, 2490, ..., 1077, 1952, 1435])\n", + "deriv_tensor (40, 81), indices tensor([2107, 1813, 2490, ..., 1077, 1952, 1435])\n", + "deriv_tensor (40, 81), indices tensor([2107, 1813, 2490, ..., 1077, 1952, 1435])\n", + "deriv_tensor (40, 81), indices tensor([2107, 1813, 2490, ..., 1077, 1952, 1435])\n", + "deriv_tensor (40, 81), indices tensor([3089, 2737, 3169, ..., 2934, 2815, 1140])\n", + "deriv_tensor (40, 81), indices tensor([3089, 2737, 3169, ..., 2934, 2815, 1140])\n", + "deriv_tensor (40, 81), indices tensor([3089, 2737, 3169, ..., 2934, 2815, 1140])\n", + "deriv_tensor (40, 81), indices tensor([3089, 2737, 3169, ..., 2934, 2815, 1140])\n", + "deriv_tensor (40, 81), indices tensor([2090, 2807, 1468, ..., 2952, 2250, 2137])\n", + "deriv_tensor (40, 81), indices tensor([2090, 2807, 1468, ..., 2952, 2250, 2137])\n", + "deriv_tensor (40, 81), indices tensor([2090, 2807, 1468, ..., 2952, 2250, 2137])\n", + "deriv_tensor (40, 81), indices tensor([2090, 2807, 1468, ..., 2952, 2250, 2137])\n", + "deriv_tensor (40, 81), indices tensor([1218, 2846, 366, ..., 1296, 1436, 1637])\n", + "deriv_tensor (40, 81), indices tensor([1218, 2846, 366, ..., 1296, 1436, 1637])\n", + "deriv_tensor (40, 81), indices tensor([1218, 2846, 366, ..., 1296, 1436, 1637])\n", + "deriv_tensor (40, 81), indices tensor([1218, 2846, 366, ..., 1296, 1436, 1637])\n", + "deriv_tensor (40, 81), indices tensor([3099, 165, 2142, ..., 3150, 2320, 2025])\n", + "deriv_tensor (40, 81), indices tensor([3099, 165, 2142, ..., 3150, 2320, 2025])\n", + "deriv_tensor (40, 81), indices tensor([3099, 165, 2142, ..., 3150, 2320, 2025])\n", + "deriv_tensor (40, 81), indices tensor([3099, 165, 2142, ..., 3150, 2320, 2025])\n", + "deriv_tensor (40, 81), indices tensor([2877, 1815, 1145, ..., 580, 1746, 2979])\n", + "deriv_tensor (40, 81), indices tensor([2877, 1815, 1145, ..., 580, 1746, 2979])\n", + "deriv_tensor (40, 81), indices tensor([2877, 1815, 1145, ..., 580, 1746, 2979])\n", + "deriv_tensor (40, 81), indices tensor([2877, 1815, 1145, ..., 580, 1746, 2979])\n", + "deriv_tensor (40, 81), indices tensor([1370, 887, 666, ..., 725, 2300, 2141])\n", + "deriv_tensor (40, 81), indices tensor([1370, 887, 666, ..., 725, 2300, 2141])\n", + "deriv_tensor (40, 81), indices tensor([1370, 887, 666, ..., 725, 2300, 2141])\n", + "deriv_tensor (40, 81), indices tensor([1370, 887, 666, ..., 725, 2300, 2141])\n", + "deriv_tensor (40, 81), indices tensor([2283, 2833, 1837, ..., 3167, 2909, 2840])\n", + "deriv_tensor (40, 81), indices tensor([2283, 2833, 1837, ..., 3167, 2909, 2840])\n", + "deriv_tensor (40, 81), indices tensor([2283, 2833, 1837, ..., 3167, 2909, 2840])\n", + "deriv_tensor (40, 81), indices tensor([2283, 2833, 1837, ..., 3167, 2909, 2840])\n", + "deriv_tensor (40, 81), indices tensor([2402, 2093, 1679, ..., 1114, 1906, 1260])\n", + "deriv_tensor (40, 81), indices tensor([2402, 2093, 1679, ..., 1114, 1906, 1260])\n", + "deriv_tensor (40, 81), indices tensor([2402, 2093, 1679, ..., 1114, 1906, 1260])\n", + "deriv_tensor (40, 81), indices tensor([2402, 2093, 
1679, ..., 1114, 1906, 1260])\n", + "deriv_tensor (40, 81), indices tensor([ 88, 173, 385, ..., 758, 343, 2505])\n", + "deriv_tensor (40, 81), indices tensor([ 88, 173, 385, ..., 758, 343, 2505])\n", + "deriv_tensor (40, 81), indices tensor([ 88, 173, 385, ..., 758, 343, 2505])\n", + "deriv_tensor (40, 81), indices tensor([ 88, 173, 385, ..., 758, 343, 2505])\n", + "deriv_tensor (40, 81), indices tensor([2563, 1011, 1216, ..., 586, 61, 1079])\n", + "deriv_tensor (40, 81), indices tensor([2563, 1011, 1216, ..., 586, 61, 1079])\n", + "deriv_tensor (40, 81), indices tensor([2563, 1011, 1216, ..., 586, 61, 1079])\n", + "deriv_tensor (40, 81), indices tensor([2563, 1011, 1216, ..., 586, 61, 1079])\n", + "deriv_tensor (40, 81), indices tensor([2613, 603, 3068, ..., 748, 623, 615])\n", + "deriv_tensor (40, 81), indices tensor([2613, 603, 3068, ..., 748, 623, 615])\n", + "deriv_tensor (40, 81), indices tensor([2613, 603, 3068, ..., 748, 623, 615])\n", + "deriv_tensor (40, 81), indices tensor([2613, 603, 3068, ..., 748, 623, 615])\n", + "deriv_tensor (40, 81), indices tensor([2127, 2401, 2920, ..., 1202, 1455, 2586])\n", + "deriv_tensor (40, 81), indices tensor([2127, 2401, 2920, ..., 1202, 1455, 2586])\n", + "deriv_tensor (40, 81), indices tensor([2127, 2401, 2920, ..., 1202, 1455, 2586])\n", + "deriv_tensor (40, 81), indices tensor([2127, 2401, 2920, ..., 1202, 1455, 2586])\n", + "deriv_tensor (40, 81), indices tensor([2665, 2928, 2614, ..., 1868, 764, 2542])\n", + "deriv_tensor (40, 81), indices tensor([2665, 2928, 2614, ..., 1868, 764, 2542])\n", + "deriv_tensor (40, 81), indices tensor([2665, 2928, 2614, ..., 1868, 764, 2542])\n", + "deriv_tensor (40, 81), indices tensor([2665, 2928, 2614, ..., 1868, 764, 2542])\n", + "deriv_tensor (40, 81), indices tensor([ 22, 405, 2421, ..., 3151, 1064, 2205])\n", + "deriv_tensor (40, 81), indices tensor([ 22, 405, 2421, ..., 3151, 1064, 2205])\n", + "deriv_tensor (40, 81), indices tensor([ 22, 405, 2421, ..., 3151, 1064, 2205])\n", + "deriv_tensor (40, 81), indices tensor([ 22, 405, 2421, ..., 3151, 1064, 2205])\n", + "deriv_tensor (40, 81), indices tensor([2241, 3046, 178, ..., 1815, 1209, 571])\n", + "deriv_tensor (40, 81), indices tensor([2241, 3046, 178, ..., 1815, 1209, 571])\n", + "deriv_tensor (40, 81), indices tensor([2241, 3046, 178, ..., 1815, 1209, 571])\n", + "deriv_tensor (40, 81), indices tensor([2241, 3046, 178, ..., 1815, 1209, 571])\n", + "deriv_tensor (40, 81), indices tensor([ 209, 3114, 2562, ..., 1057, 1740, 904])\n", + "deriv_tensor (40, 81), indices tensor([ 209, 3114, 2562, ..., 1057, 1740, 904])\n", + "deriv_tensor (40, 81), indices tensor([ 209, 3114, 2562, ..., 1057, 1740, 904])\n", + "deriv_tensor (40, 81), indices tensor([ 209, 3114, 2562, ..., 1057, 1740, 904])\n", + "deriv_tensor (40, 81), indices tensor([2104, 370, 277, ..., 1288, 2070, 1042])\n", + "deriv_tensor (40, 81), indices tensor([2104, 370, 277, ..., 1288, 2070, 1042])\n", + "deriv_tensor (40, 81), indices tensor([2104, 370, 277, ..., 1288, 2070, 1042])\n", + "deriv_tensor (40, 81), indices tensor([2104, 370, 277, ..., 1288, 2070, 1042])\n", + "deriv_tensor (40, 81), indices tensor([2977, 668, 945, ..., 880, 1779, 187])\n", + "deriv_tensor (40, 81), indices tensor([2977, 668, 945, ..., 880, 1779, 187])\n", + "deriv_tensor (40, 81), indices tensor([2977, 668, 945, ..., 880, 1779, 187])\n", + "deriv_tensor (40, 81), indices tensor([2977, 668, 945, ..., 880, 1779, 187])\n", + "deriv_tensor (40, 81), indices tensor([3096, 142, 1903, ..., 970, 784, 2741])\n", + 
"deriv_tensor (40, 81), indices tensor([3096, 142, 1903, ..., 970, 784, 2741])\n", + "deriv_tensor (40, 81), indices tensor([3096, 142, 1903, ..., 970, 784, 2741])\n", + "deriv_tensor (40, 81), indices tensor([3096, 142, 1903, ..., 970, 784, 2741])\n", + "deriv_tensor (40, 81), indices tensor([2710, 1139, 1949, ..., 1821, 2518, 811])\n", + "deriv_tensor (40, 81), indices tensor([2710, 1139, 1949, ..., 1821, 2518, 811])\n", + "deriv_tensor (40, 81), indices tensor([2710, 1139, 1949, ..., 1821, 2518, 811])\n", + "deriv_tensor (40, 81), indices tensor([2710, 1139, 1949, ..., 1821, 2518, 811])\n", + "deriv_tensor (40, 81), indices tensor([3138, 1068, 2460, ..., 1006, 570, 695])\n", + "deriv_tensor (40, 81), indices tensor([3138, 1068, 2460, ..., 1006, 570, 695])\n", + "deriv_tensor (40, 81), indices tensor([3138, 1068, 2460, ..., 1006, 570, 695])\n", + "deriv_tensor (40, 81), indices tensor([3138, 1068, 2460, ..., 1006, 570, 695])\n", + "deriv_tensor (40, 81), indices tensor([ 66, 2169, 1429, ..., 2410, 730, 230])\n", + "deriv_tensor (40, 81), indices tensor([ 66, 2169, 1429, ..., 2410, 730, 230])\n", + "deriv_tensor (40, 81), indices tensor([ 66, 2169, 1429, ..., 2410, 730, 230])\n", + "deriv_tensor (40, 81), indices tensor([ 66, 2169, 1429, ..., 2410, 730, 230])\n", + "deriv_tensor (40, 81), indices tensor([1033, 481, 1923, ..., 702, 418, 2353])\n", + "deriv_tensor (40, 81), indices tensor([1033, 481, 1923, ..., 702, 418, 2353])\n", + "deriv_tensor (40, 81), indices tensor([1033, 481, 1923, ..., 702, 418, 2353])\n", + "deriv_tensor (40, 81), indices tensor([1033, 481, 1923, ..., 702, 418, 2353])\n", + "deriv_tensor (40, 81), indices tensor([2465, 362, 668, ..., 851, 2931, 1312])\n", + "deriv_tensor (40, 81), indices tensor([2465, 362, 668, ..., 851, 2931, 1312])\n", + "deriv_tensor (40, 81), indices tensor([2465, 362, 668, ..., 851, 2931, 1312])\n", + "deriv_tensor (40, 81), indices tensor([2465, 362, 668, ..., 851, 2931, 1312])\n", + "deriv_tensor (40, 81), indices tensor([1002, 865, 833, ..., 262, 3123, 1115])\n", + "deriv_tensor (40, 81), indices tensor([1002, 865, 833, ..., 262, 3123, 1115])\n", + "deriv_tensor (40, 81), indices tensor([1002, 865, 833, ..., 262, 3123, 1115])\n", + "deriv_tensor (40, 81), indices tensor([1002, 865, 833, ..., 262, 3123, 1115])\n", + "deriv_tensor (40, 81), indices tensor([2554, 2225, 1448, ..., 3028, 154, 1062])\n", + "deriv_tensor (40, 81), indices tensor([2554, 2225, 1448, ..., 3028, 154, 1062])\n", + "deriv_tensor (40, 81), indices tensor([2554, 2225, 1448, ..., 3028, 154, 1062])\n", + "deriv_tensor (40, 81), indices tensor([2554, 2225, 1448, ..., 3028, 154, 1062])\n", + "deriv_tensor (40, 81), indices tensor([1495, 1584, 257, ..., 926, 782, 2471])\n", + "deriv_tensor (40, 81), indices tensor([1495, 1584, 257, ..., 926, 782, 2471])\n", + "deriv_tensor (40, 81), indices tensor([1495, 1584, 257, ..., 926, 782, 2471])\n", + "deriv_tensor (40, 81), indices tensor([1495, 1584, 257, ..., 926, 782, 2471])\n", + "deriv_tensor (40, 81), indices tensor([ 872, 1270, 1523, ..., 2276, 3084, 584])\n", + "deriv_tensor (40, 81), indices tensor([ 872, 1270, 1523, ..., 2276, 3084, 584])\n", + "deriv_tensor (40, 81), indices tensor([ 872, 1270, 1523, ..., 2276, 3084, 584])\n", + "deriv_tensor (40, 81), indices tensor([ 872, 1270, 1523, ..., 2276, 3084, 584])\n", + "deriv_tensor (40, 81), indices tensor([1370, 1197, 2060, ..., 1828, 1846, 3045])\n", + "deriv_tensor (40, 81), indices tensor([1370, 1197, 2060, ..., 1828, 1846, 3045])\n", + "deriv_tensor (40, 81), indices 
tensor([1370, 1197, 2060, ..., 1828, 1846, 3045])\n", + "deriv_tensor (40, 81), indices tensor([1370, 1197, 2060, ..., 1828, 1846, 3045])\n", + "deriv_tensor (40, 81), indices tensor([1122, 2618, 1571, ..., 693, 1142, 490])\n", + "deriv_tensor (40, 81), indices tensor([1122, 2618, 1571, ..., 693, 1142, 490])\n", + "deriv_tensor (40, 81), indices tensor([1122, 2618, 1571, ..., 693, 1142, 490])\n", + "deriv_tensor (40, 81), indices tensor([1122, 2618, 1571, ..., 693, 1142, 490])\n", + "deriv_tensor (40, 81), indices tensor([1291, 3097, 1828, ..., 479, 2693, 436])\n", + "deriv_tensor (40, 81), indices tensor([1291, 3097, 1828, ..., 479, 2693, 436])\n", + "deriv_tensor (40, 81), indices tensor([1291, 3097, 1828, ..., 479, 2693, 436])\n", + "deriv_tensor (40, 81), indices tensor([1291, 3097, 1828, ..., 479, 2693, 436])\n", + "deriv_tensor (40, 81), indices tensor([3122, 1461, 1406, ..., 1363, 2780, 1701])\n", + "deriv_tensor (40, 81), indices tensor([3122, 1461, 1406, ..., 1363, 2780, 1701])\n", + "deriv_tensor (40, 81), indices tensor([3122, 1461, 1406, ..., 1363, 2780, 1701])\n", + "deriv_tensor (40, 81), indices tensor([3122, 1461, 1406, ..., 1363, 2780, 1701])\n", + "deriv_tensor (40, 81), indices tensor([2837, 2062, 1520, ..., 1568, 125, 2563])\n", + "deriv_tensor (40, 81), indices tensor([2837, 2062, 1520, ..., 1568, 125, 2563])\n", + "deriv_tensor (40, 81), indices tensor([2837, 2062, 1520, ..., 1568, 125, 2563])\n", + "deriv_tensor (40, 81), indices tensor([2837, 2062, 1520, ..., 1568, 125, 2563])\n", + "deriv_tensor (40, 81), indices tensor([3113, 1311, 3103, ..., 1769, 2717, 1105])\n", + "deriv_tensor (40, 81), indices tensor([3113, 1311, 3103, ..., 1769, 2717, 1105])\n", + "deriv_tensor (40, 81), indices tensor([3113, 1311, 3103, ..., 1769, 2717, 1105])\n", + "deriv_tensor (40, 81), indices tensor([3113, 1311, 3103, ..., 1769, 2717, 1105])\n", + "deriv_tensor (40, 81), indices tensor([1034, 2605, 969, ..., 2134, 2151, 1494])\n", + "deriv_tensor (40, 81), indices tensor([1034, 2605, 969, ..., 2134, 2151, 1494])\n", + "deriv_tensor (40, 81), indices tensor([1034, 2605, 969, ..., 2134, 2151, 1494])\n", + "deriv_tensor (40, 81), indices tensor([1034, 2605, 969, ..., 2134, 2151, 1494])\n", + "deriv_tensor (40, 81), indices tensor([ 659, 3061, 2440, ..., 1594, 2047, 1395])\n", + "deriv_tensor (40, 81), indices tensor([ 659, 3061, 2440, ..., 1594, 2047, 1395])\n", + "deriv_tensor (40, 81), indices tensor([ 659, 3061, 2440, ..., 1594, 2047, 1395])\n", + "deriv_tensor (40, 81), indices tensor([ 659, 3061, 2440, ..., 1594, 2047, 1395])\n", + "deriv_tensor (40, 81), indices tensor([ 402, 1426, 1597, ..., 2942, 2291, 1763])\n", + "deriv_tensor (40, 81), indices tensor([ 402, 1426, 1597, ..., 2942, 2291, 1763])\n", + "deriv_tensor (40, 81), indices tensor([ 402, 1426, 1597, ..., 2942, 2291, 1763])\n", + "deriv_tensor (40, 81), indices tensor([ 402, 1426, 1597, ..., 2942, 2291, 1763])\n", + "deriv_tensor (40, 81), indices tensor([2334, 216, 1771, ..., 2877, 1111, 134])\n", + "deriv_tensor (40, 81), indices tensor([2334, 216, 1771, ..., 2877, 1111, 134])\n", + "deriv_tensor (40, 81), indices tensor([2334, 216, 1771, ..., 2877, 1111, 134])\n", + "deriv_tensor (40, 81), indices tensor([2334, 216, 1771, ..., 2877, 1111, 134])\n", + "deriv_tensor (40, 81), indices tensor([2734, 2346, 3016, ..., 744, 1167, 2423])\n", + "deriv_tensor (40, 81), indices tensor([2734, 2346, 3016, ..., 744, 1167, 2423])\n", + "deriv_tensor (40, 81), indices tensor([2734, 2346, 3016, ..., 744, 1167, 2423])\n", + "deriv_tensor 
(40, 81), indices tensor([2734, 2346, 3016, ..., 744, 1167, 2423])\n", + "deriv_tensor (40, 81), indices tensor([1326, 1285, 98, ..., 2901, 781, 2120])\n", + "deriv_tensor (40, 81), indices tensor([1326, 1285, 98, ..., 2901, 781, 2120])\n", + "deriv_tensor (40, 81), indices tensor([1326, 1285, 98, ..., 2901, 781, 2120])\n", + "deriv_tensor (40, 81), indices tensor([1326, 1285, 98, ..., 2901, 781, 2120])\n", + "deriv_tensor (40, 81), indices tensor([ 614, 2674, 401, ..., 2779, 1711, 2489])\n", + "deriv_tensor (40, 81), indices tensor([ 614, 2674, 401, ..., 2779, 1711, 2489])\n", + "deriv_tensor (40, 81), indices tensor([ 614, 2674, 401, ..., 2779, 1711, 2489])\n", + "deriv_tensor (40, 81), indices tensor([ 614, 2674, 401, ..., 2779, 1711, 2489])\n", + "deriv_tensor (40, 81), indices tensor([2131, 300, 1618, ..., 242, 1848, 495])\n", + "deriv_tensor (40, 81), indices tensor([2131, 300, 1618, ..., 242, 1848, 495])\n", + "deriv_tensor (40, 81), indices tensor([2131, 300, 1618, ..., 242, 1848, 495])\n", + "deriv_tensor (40, 81), indices tensor([2131, 300, 1618, ..., 242, 1848, 495])\n", + "deriv_tensor (40, 81), indices tensor([3024, 694, 2184, ..., 1608, 895, 2584])\n", + "deriv_tensor (40, 81), indices tensor([3024, 694, 2184, ..., 1608, 895, 2584])\n", + "deriv_tensor (40, 81), indices tensor([3024, 694, 2184, ..., 1608, 895, 2584])\n", + "deriv_tensor (40, 81), indices tensor([3024, 694, 2184, ..., 1608, 895, 2584])\n", + "deriv_tensor (40, 81), indices tensor([2754, 1963, 2477, ..., 788, 1023, 489])\n", + "deriv_tensor (40, 81), indices tensor([2754, 1963, 2477, ..., 788, 1023, 489])\n", + "deriv_tensor (40, 81), indices tensor([2754, 1963, 2477, ..., 788, 1023, 489])\n", + "deriv_tensor (40, 81), indices tensor([2754, 1963, 2477, ..., 788, 1023, 489])\n", + "deriv_tensor (40, 81), indices tensor([3048, 773, 1497, ..., 2408, 1857, 349])\n", + "deriv_tensor (40, 81), indices tensor([3048, 773, 1497, ..., 2408, 1857, 349])\n", + "deriv_tensor (40, 81), indices tensor([3048, 773, 1497, ..., 2408, 1857, 349])\n", + "deriv_tensor (40, 81), indices tensor([3048, 773, 1497, ..., 2408, 1857, 349])\n", + "deriv_tensor (40, 81), indices tensor([1825, 1098, 2834, ..., 3092, 2135, 1568])\n", + "deriv_tensor (40, 81), indices tensor([1825, 1098, 2834, ..., 3092, 2135, 1568])\n", + "deriv_tensor (40, 81), indices tensor([1825, 1098, 2834, ..., 3092, 2135, 1568])\n", + "deriv_tensor (40, 81), indices tensor([1825, 1098, 2834, ..., 3092, 2135, 1568])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2949, 734, ..., 2734, 2518, 1765])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2949, 734, ..., 2734, 2518, 1765])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2949, 734, ..., 2734, 2518, 1765])\n", + "deriv_tensor (40, 81), indices tensor([1316, 2949, 734, ..., 2734, 2518, 1765])\n", + "deriv_tensor (40, 81), indices tensor([2823, 1175, 470, ..., 451, 133, 3232])\n", + "deriv_tensor (40, 81), indices tensor([2823, 1175, 470, ..., 451, 133, 3232])\n", + "deriv_tensor (40, 81), indices tensor([2823, 1175, 470, ..., 451, 133, 3232])\n", + "deriv_tensor (40, 81), indices tensor([2823, 1175, 470, ..., 451, 133, 3232])\n", + "deriv_tensor (40, 81), indices tensor([1932, 485, 3151, ..., 1915, 1874, 464])\n", + "deriv_tensor (40, 81), indices tensor([1932, 485, 3151, ..., 1915, 1874, 464])\n", + "deriv_tensor (40, 81), indices tensor([1932, 485, 3151, ..., 1915, 1874, 464])\n", + "deriv_tensor (40, 81), indices tensor([1932, 485, 3151, ..., 1915, 1874, 464])\n", + "deriv_tensor (40, 81), indices 
tensor([2297, 2899, 2578, ..., 2599, 733, 2103])\n", + "deriv_tensor (40, 81), indices tensor([2297, 2899, 2578, ..., 2599, 733, 2103])\n", + "deriv_tensor (40, 81), indices tensor([2297, 2899, 2578, ..., 2599, 733, 2103])\n", + "deriv_tensor (40, 81), indices tensor([2297, 2899, 2578, ..., 2599, 733, 2103])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 870, 2200, ..., 1550, 2399, 103])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 870, 2200, ..., 1550, 2399, 103])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 870, 2200, ..., 1550, 2399, 103])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 870, 2200, ..., 1550, 2399, 103])\n", + "deriv_tensor (40, 81), indices tensor([ 591, 750, 2065, ..., 2728, 493, 1323])\n", + "deriv_tensor (40, 81), indices tensor([ 591, 750, 2065, ..., 2728, 493, 1323])\n", + "deriv_tensor (40, 81), indices tensor([ 591, 750, 2065, ..., 2728, 493, 1323])\n", + "deriv_tensor (40, 81), indices tensor([ 591, 750, 2065, ..., 2728, 493, 1323])\n", + "deriv_tensor (40, 81), indices tensor([2778, 2944, 1089, ..., 1164, 1998, 278])\n", + "deriv_tensor (40, 81), indices tensor([2778, 2944, 1089, ..., 1164, 1998, 278])\n", + "deriv_tensor (40, 81), indices tensor([2778, 2944, 1089, ..., 1164, 1998, 278])\n", + "deriv_tensor (40, 81), indices tensor([2778, 2944, 1089, ..., 1164, 1998, 278])\n", + "deriv_tensor (40, 81), indices tensor([2644, 948, 994, ..., 2746, 1145, 462])\n", + "deriv_tensor (40, 81), indices tensor([2644, 948, 994, ..., 2746, 1145, 462])\n", + "deriv_tensor (40, 81), indices tensor([2644, 948, 994, ..., 2746, 1145, 462])\n", + "deriv_tensor (40, 81), indices tensor([2644, 948, 994, ..., 2746, 1145, 462])\n", + "deriv_tensor (40, 81), indices tensor([2940, 2667, 300, ..., 2622, 40, 288])\n", + "deriv_tensor (40, 81), indices tensor([2940, 2667, 300, ..., 2622, 40, 288])\n", + "deriv_tensor (40, 81), indices tensor([2940, 2667, 300, ..., 2622, 40, 288])\n", + "deriv_tensor (40, 81), indices tensor([2940, 2667, 300, ..., 2622, 40, 288])\n", + "deriv_tensor (40, 81), indices tensor([ 736, 1220, 631, ..., 1135, 1249, 2830])\n", + "deriv_tensor (40, 81), indices tensor([ 736, 1220, 631, ..., 1135, 1249, 2830])\n", + "deriv_tensor (40, 81), indices tensor([ 736, 1220, 631, ..., 1135, 1249, 2830])\n", + "deriv_tensor (40, 81), indices tensor([ 736, 1220, 631, ..., 1135, 1249, 2830])\n", + "deriv_tensor (40, 81), indices tensor([1896, 1840, 1389, ..., 1183, 2617, 1316])\n", + "deriv_tensor (40, 81), indices tensor([1896, 1840, 1389, ..., 1183, 2617, 1316])\n", + "deriv_tensor (40, 81), indices tensor([1896, 1840, 1389, ..., 1183, 2617, 1316])\n", + "deriv_tensor (40, 81), indices tensor([1896, 1840, 1389, ..., 1183, 2617, 1316])\n", + "deriv_tensor (40, 81), indices tensor([2530, 1618, 338, ..., 1169, 2092, 1144])\n", + "deriv_tensor (40, 81), indices tensor([2530, 1618, 338, ..., 1169, 2092, 1144])\n", + "deriv_tensor (40, 81), indices tensor([2530, 1618, 338, ..., 1169, 2092, 1144])\n", + "deriv_tensor (40, 81), indices tensor([2530, 1618, 338, ..., 1169, 2092, 1144])\n", + "deriv_tensor (40, 81), indices tensor([1998, 442, 2187, ..., 2155, 533, 1193])\n", + "deriv_tensor (40, 81), indices tensor([1998, 442, 2187, ..., 2155, 533, 1193])\n", + "deriv_tensor (40, 81), indices tensor([1998, 442, 2187, ..., 2155, 533, 1193])\n", + "deriv_tensor (40, 81), indices tensor([1998, 442, 2187, ..., 2155, 533, 1193])\n", + "deriv_tensor (40, 81), indices tensor([3149, 1472, 1189, ..., 674, 849, 1060])\n", + "deriv_tensor (40, 81), indices tensor([3149, 
1472, 1189, ..., 674, 849, 1060])\n", + "deriv_tensor (40, 81), indices tensor([3149, 1472, 1189, ..., 674, 849, 1060])\n", + "deriv_tensor (40, 81), indices tensor([3149, 1472, 1189, ..., 674, 849, 1060])\n", + "deriv_tensor (40, 81), indices tensor([1943, 1448, 190, ..., 2277, 2769, 1634])\n", + "deriv_tensor (40, 81), indices tensor([1943, 1448, 190, ..., 2277, 2769, 1634])\n", + "deriv_tensor (40, 81), indices tensor([1943, 1448, 190, ..., 2277, 2769, 1634])\n", + "deriv_tensor (40, 81), indices tensor([1943, 1448, 190, ..., 2277, 2769, 1634])\n", + "deriv_tensor (40, 81), indices tensor([1724, 1672, 418, ..., 2393, 1621, 720])\n", + "deriv_tensor (40, 81), indices tensor([1724, 1672, 418, ..., 2393, 1621, 720])\n", + "deriv_tensor (40, 81), indices tensor([1724, 1672, 418, ..., 2393, 1621, 720])\n", + "deriv_tensor (40, 81), indices tensor([1724, 1672, 418, ..., 2393, 1621, 720])\n", + "deriv_tensor (40, 81), indices tensor([ 544, 1586, 340, ..., 1820, 3063, 1037])\n", + "deriv_tensor (40, 81), indices tensor([ 544, 1586, 340, ..., 1820, 3063, 1037])\n", + "deriv_tensor (40, 81), indices tensor([ 544, 1586, 340, ..., 1820, 3063, 1037])\n", + "deriv_tensor (40, 81), indices tensor([ 544, 1586, 340, ..., 1820, 3063, 1037])\n", + "deriv_tensor (40, 81), indices tensor([1953, 371, 2990, ..., 2406, 516, 1941])\n", + "deriv_tensor (40, 81), indices tensor([1953, 371, 2990, ..., 2406, 516, 1941])\n", + "deriv_tensor (40, 81), indices tensor([1953, 371, 2990, ..., 2406, 516, 1941])\n", + "deriv_tensor (40, 81), indices tensor([1953, 371, 2990, ..., 2406, 516, 1941])\n", + "deriv_tensor (40, 81), indices tensor([2058, 2633, 2569, ..., 2471, 3216, 368])\n", + "deriv_tensor (40, 81), indices tensor([2058, 2633, 2569, ..., 2471, 3216, 368])\n", + "deriv_tensor (40, 81), indices tensor([2058, 2633, 2569, ..., 2471, 3216, 368])\n", + "deriv_tensor (40, 81), indices tensor([2058, 2633, 2569, ..., 2471, 3216, 368])\n", + "deriv_tensor (40, 81), indices tensor([ 816, 785, 1158, ..., 2459, 2638, 2038])\n", + "deriv_tensor (40, 81), indices tensor([ 816, 785, 1158, ..., 2459, 2638, 2038])\n", + "deriv_tensor (40, 81), indices tensor([ 816, 785, 1158, ..., 2459, 2638, 2038])\n", + "deriv_tensor (40, 81), indices tensor([ 816, 785, 1158, ..., 2459, 2638, 2038])\n", + "deriv_tensor (40, 81), indices tensor([1386, 2344, 2030, ..., 2574, 2207, 644])\n", + "deriv_tensor (40, 81), indices tensor([1386, 2344, 2030, ..., 2574, 2207, 644])\n", + "deriv_tensor (40, 81), indices tensor([1386, 2344, 2030, ..., 2574, 2207, 644])\n", + "deriv_tensor (40, 81), indices tensor([1386, 2344, 2030, ..., 2574, 2207, 644])\n", + "deriv_tensor (40, 81), indices tensor([1273, 779, 1384, ..., 1738, 2146, 1903])\n", + "deriv_tensor (40, 81), indices tensor([1273, 779, 1384, ..., 1738, 2146, 1903])\n", + "deriv_tensor (40, 81), indices tensor([1273, 779, 1384, ..., 1738, 2146, 1903])\n", + "deriv_tensor (40, 81), indices tensor([1273, 779, 1384, ..., 1738, 2146, 1903])\n", + "deriv_tensor (40, 81), indices tensor([2348, 1206, 230, ..., 1333, 674, 2451])\n", + "deriv_tensor (40, 81), indices tensor([2348, 1206, 230, ..., 1333, 674, 2451])\n", + "deriv_tensor (40, 81), indices tensor([2348, 1206, 230, ..., 1333, 674, 2451])\n", + "deriv_tensor (40, 81), indices tensor([2348, 1206, 230, ..., 1333, 674, 2451])\n", + "deriv_tensor (40, 81), indices tensor([1283, 958, 3205, ..., 798, 1991, 2469])\n", + "deriv_tensor (40, 81), indices tensor([1283, 958, 3205, ..., 798, 1991, 2469])\n", + "deriv_tensor (40, 81), indices tensor([1283, 958, 
3205, ..., 798, 1991, 2469])\n", + "deriv_tensor (40, 81), indices tensor([1283, 958, 3205, ..., 798, 1991, 2469])\n", + "deriv_tensor (40, 81), indices tensor([2054, 255, 2039, ..., 1065, 1609, 2294])\n", + "deriv_tensor (40, 81), indices tensor([2054, 255, 2039, ..., 1065, 1609, 2294])\n", + "deriv_tensor (40, 81), indices tensor([2054, 255, 2039, ..., 1065, 1609, 2294])\n", + "deriv_tensor (40, 81), indices tensor([2054, 255, 2039, ..., 1065, 1609, 2294])\n", + "deriv_tensor (40, 81), indices tensor([1907, 1097, 2441, ..., 1958, 1401, 1805])\n", + "deriv_tensor (40, 81), indices tensor([1907, 1097, 2441, ..., 1958, 1401, 1805])\n", + "deriv_tensor (40, 81), indices tensor([1907, 1097, 2441, ..., 1958, 1401, 1805])\n", + "deriv_tensor (40, 81), indices tensor([1907, 1097, 2441, ..., 1958, 1401, 1805])\n", + "deriv_tensor (40, 81), indices tensor([ 776, 2158, 403, ..., 608, 3066, 3126])\n", + "deriv_tensor (40, 81), indices tensor([ 776, 2158, 403, ..., 608, 3066, 3126])\n", + "deriv_tensor (40, 81), indices tensor([ 776, 2158, 403, ..., 608, 3066, 3126])\n", + "deriv_tensor (40, 81), indices tensor([ 776, 2158, 403, ..., 608, 3066, 3126])\n", + "deriv_tensor (40, 81), indices tensor([1196, 2744, 206, ..., 1581, 1228, 2784])\n", + "deriv_tensor (40, 81), indices tensor([1196, 2744, 206, ..., 1581, 1228, 2784])\n", + "deriv_tensor (40, 81), indices tensor([1196, 2744, 206, ..., 1581, 1228, 2784])\n", + "deriv_tensor (40, 81), indices tensor([1196, 2744, 206, ..., 1581, 1228, 2784])\n", + "deriv_tensor (40, 81), indices tensor([1264, 1553, 1811, ..., 2204, 50, 2062])\n", + "deriv_tensor (40, 81), indices tensor([1264, 1553, 1811, ..., 2204, 50, 2062])\n", + "deriv_tensor (40, 81), indices tensor([1264, 1553, 1811, ..., 2204, 50, 2062])\n", + "deriv_tensor (40, 81), indices tensor([1264, 1553, 1811, ..., 2204, 50, 2062])\n", + "deriv_tensor (40, 81), indices tensor([3190, 2771, 2910, ..., 735, 509, 2193])\n", + "deriv_tensor (40, 81), indices tensor([3190, 2771, 2910, ..., 735, 509, 2193])\n", + "deriv_tensor (40, 81), indices tensor([3190, 2771, 2910, ..., 735, 509, 2193])\n", + "deriv_tensor (40, 81), indices tensor([3190, 2771, 2910, ..., 735, 509, 2193])\n", + "deriv_tensor (40, 81), indices tensor([1960, 3121, 3074, ..., 1110, 1531, 3145])\n", + "deriv_tensor (40, 81), indices tensor([1960, 3121, 3074, ..., 1110, 1531, 3145])\n", + "deriv_tensor (40, 81), indices tensor([1960, 3121, 3074, ..., 1110, 1531, 3145])\n", + "deriv_tensor (40, 81), indices tensor([1960, 3121, 3074, ..., 1110, 1531, 3145])\n", + "deriv_tensor (40, 81), indices tensor([ 703, 267, 2611, ..., 3040, 1450, 2616])\n", + "deriv_tensor (40, 81), indices tensor([ 703, 267, 2611, ..., 3040, 1450, 2616])\n", + "deriv_tensor (40, 81), indices tensor([ 703, 267, 2611, ..., 3040, 1450, 2616])\n", + "deriv_tensor (40, 81), indices tensor([ 703, 267, 2611, ..., 3040, 1450, 2616])\n", + "deriv_tensor (40, 81), indices tensor([3175, 1974, 2757, ..., 1501, 3125, 3069])\n", + "deriv_tensor (40, 81), indices tensor([3175, 1974, 2757, ..., 1501, 3125, 3069])\n", + "deriv_tensor (40, 81), indices tensor([3175, 1974, 2757, ..., 1501, 3125, 3069])\n", + "deriv_tensor (40, 81), indices tensor([3175, 1974, 2757, ..., 1501, 3125, 3069])\n", + "deriv_tensor (40, 81), indices tensor([1123, 2684, 1868, ..., 1780, 460, 44])\n", + "deriv_tensor (40, 81), indices tensor([1123, 2684, 1868, ..., 1780, 460, 44])\n", + "deriv_tensor (40, 81), indices tensor([1123, 2684, 1868, ..., 1780, 460, 44])\n", + "deriv_tensor (40, 81), indices tensor([1123, 
2684, 1868, ..., 1780, 460, 44])\n", + "deriv_tensor (40, 81), indices tensor([1401, 2372, 2893, ..., 719, 940, 2767])\n", + "deriv_tensor (40, 81), indices tensor([1401, 2372, 2893, ..., 719, 940, 2767])\n", + "deriv_tensor (40, 81), indices tensor([1401, 2372, 2893, ..., 719, 940, 2767])\n", + "deriv_tensor (40, 81), indices tensor([1401, 2372, 2893, ..., 719, 940, 2767])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 2030, 152, ..., 246, 804, 1919])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 2030, 152, ..., 246, 804, 1919])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 2030, 152, ..., 246, 804, 1919])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 2030, 152, ..., 246, 804, 1919])\n", + "deriv_tensor (40, 81), indices tensor([ 662, 2651, 1750, ..., 2373, 614, 46])\n", + "deriv_tensor (40, 81), indices tensor([ 662, 2651, 1750, ..., 2373, 614, 46])\n", + "deriv_tensor (40, 81), indices tensor([ 662, 2651, 1750, ..., 2373, 614, 46])\n", + "deriv_tensor (40, 81), indices tensor([ 662, 2651, 1750, ..., 2373, 614, 46])\n", + "deriv_tensor (40, 81), indices tensor([2622, 2997, 3141, ..., 853, 957, 1646])\n", + "deriv_tensor (40, 81), indices tensor([2622, 2997, 3141, ..., 853, 957, 1646])\n", + "deriv_tensor (40, 81), indices tensor([2622, 2997, 3141, ..., 853, 957, 1646])\n", + "deriv_tensor (40, 81), indices tensor([2622, 2997, 3141, ..., 853, 957, 1646])\n", + "deriv_tensor (40, 81), indices tensor([1762, 2481, 663, ..., 2306, 378, 1435])\n", + "deriv_tensor (40, 81), indices tensor([1762, 2481, 663, ..., 2306, 378, 1435])\n", + "deriv_tensor (40, 81), indices tensor([1762, 2481, 663, ..., 2306, 378, 1435])\n", + "deriv_tensor (40, 81), indices tensor([1762, 2481, 663, ..., 2306, 378, 1435])\n", + "deriv_tensor (40, 81), indices tensor([ 136, 777, 1786, ..., 2547, 3217, 2315])\n", + "deriv_tensor (40, 81), indices tensor([ 136, 777, 1786, ..., 2547, 3217, 2315])\n", + "deriv_tensor (40, 81), indices tensor([ 136, 777, 1786, ..., 2547, 3217, 2315])\n", + "deriv_tensor (40, 81), indices tensor([ 136, 777, 1786, ..., 2547, 3217, 2315])\n", + "deriv_tensor (40, 81), indices tensor([2649, 532, 3177, ..., 1082, 218, 2187])\n", + "deriv_tensor (40, 81), indices tensor([2649, 532, 3177, ..., 1082, 218, 2187])\n", + "deriv_tensor (40, 81), indices tensor([2649, 532, 3177, ..., 1082, 218, 2187])\n", + "deriv_tensor (40, 81), indices tensor([2649, 532, 3177, ..., 1082, 218, 2187])\n", + "deriv_tensor (40, 81), indices tensor([ 538, 1561, 1026, ..., 1425, 881, 1708])\n", + "deriv_tensor (40, 81), indices tensor([ 538, 1561, 1026, ..., 1425, 881, 1708])\n", + "deriv_tensor (40, 81), indices tensor([ 538, 1561, 1026, ..., 1425, 881, 1708])\n", + "deriv_tensor (40, 81), indices tensor([ 538, 1561, 1026, ..., 1425, 881, 1708])\n", + "deriv_tensor (40, 81), indices tensor([2912, 210, 928, ..., 829, 2840, 2214])\n", + "deriv_tensor (40, 81), indices tensor([2912, 210, 928, ..., 829, 2840, 2214])\n", + "deriv_tensor (40, 81), indices tensor([2912, 210, 928, ..., 829, 2840, 2214])\n", + "deriv_tensor (40, 81), indices tensor([2912, 210, 928, ..., 829, 2840, 2214])\n", + "deriv_tensor (40, 81), indices tensor([1932, 1306, 3097, ..., 2132, 3030, 1132])\n", + "deriv_tensor (40, 81), indices tensor([1932, 1306, 3097, ..., 2132, 3030, 1132])\n", + "deriv_tensor (40, 81), indices tensor([1932, 1306, 3097, ..., 2132, 3030, 1132])\n", + "deriv_tensor (40, 81), indices tensor([1932, 1306, 3097, ..., 2132, 3030, 1132])\n", + "deriv_tensor (40, 81), indices tensor([2550, 2267, 1876, ..., 1489, 
1771, 1358])\n", + "deriv_tensor (40, 81), indices tensor([2550, 2267, 1876, ..., 1489, 1771, 1358])\n", + "deriv_tensor (40, 81), indices tensor([2550, 2267, 1876, ..., 1489, 1771, 1358])\n", + "deriv_tensor (40, 81), indices tensor([2550, 2267, 1876, ..., 1489, 1771, 1358])\n", + "deriv_tensor (40, 81), indices tensor([1333, 799, 967, ..., 1282, 1313, 1090])\n", + "deriv_tensor (40, 81), indices tensor([1333, 799, 967, ..., 1282, 1313, 1090])\n", + "deriv_tensor (40, 81), indices tensor([1333, 799, 967, ..., 1282, 1313, 1090])\n", + "deriv_tensor (40, 81), indices tensor([1333, 799, 967, ..., 1282, 1313, 1090])\n", + "deriv_tensor (40, 81), indices tensor([1025, 1202, 1466, ..., 565, 358, 804])\n", + "deriv_tensor (40, 81), indices tensor([1025, 1202, 1466, ..., 565, 358, 804])\n", + "deriv_tensor (40, 81), indices tensor([1025, 1202, 1466, ..., 565, 358, 804])\n", + "deriv_tensor (40, 81), indices tensor([1025, 1202, 1466, ..., 565, 358, 804])\n", + "deriv_tensor (40, 81), indices tensor([ 269, 921, 3216, ..., 2398, 3054, 1216])\n", + "deriv_tensor (40, 81), indices tensor([ 269, 921, 3216, ..., 2398, 3054, 1216])\n", + "deriv_tensor (40, 81), indices tensor([ 269, 921, 3216, ..., 2398, 3054, 1216])\n", + "deriv_tensor (40, 81), indices tensor([ 269, 921, 3216, ..., 2398, 3054, 1216])\n", + "deriv_tensor (40, 81), indices tensor([ 695, 1008, 1139, ..., 1486, 813, 1753])\n", + "deriv_tensor (40, 81), indices tensor([ 695, 1008, 1139, ..., 1486, 813, 1753])\n", + "deriv_tensor (40, 81), indices tensor([ 695, 1008, 1139, ..., 1486, 813, 1753])\n", + "deriv_tensor (40, 81), indices tensor([ 695, 1008, 1139, ..., 1486, 813, 1753])\n", + "deriv_tensor (40, 81), indices tensor([1151, 155, 818, ..., 1291, 3007, 1368])\n", + "deriv_tensor (40, 81), indices tensor([1151, 155, 818, ..., 1291, 3007, 1368])\n", + "deriv_tensor (40, 81), indices tensor([1151, 155, 818, ..., 1291, 3007, 1368])\n", + "deriv_tensor (40, 81), indices tensor([1151, 155, 818, ..., 1291, 3007, 1368])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 1960, 2302, ..., 2169, 505, 3121])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 1960, 2302, ..., 2169, 505, 3121])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 1960, 2302, ..., 2169, 505, 3121])\n", + "deriv_tensor (40, 81), indices tensor([ 276, 1960, 2302, ..., 2169, 505, 3121])\n", + "deriv_tensor (40, 81), indices tensor([3167, 1572, 1178, ..., 323, 460, 3165])\n", + "deriv_tensor (40, 81), indices tensor([3167, 1572, 1178, ..., 323, 460, 3165])\n", + "deriv_tensor (40, 81), indices tensor([3167, 1572, 1178, ..., 323, 460, 3165])\n", + "deriv_tensor (40, 81), indices tensor([3167, 1572, 1178, ..., 323, 460, 3165])\n", + "deriv_tensor (40, 81), indices tensor([2357, 2163, 1077, ..., 2949, 1406, 2405])\n", + "deriv_tensor (40, 81), indices tensor([2357, 2163, 1077, ..., 2949, 1406, 2405])\n", + "deriv_tensor (40, 81), indices tensor([2357, 2163, 1077, ..., 2949, 1406, 2405])\n", + "deriv_tensor (40, 81), indices tensor([2357, 2163, 1077, ..., 2949, 1406, 2405])\n", + "deriv_tensor (40, 81), indices tensor([ 703, 1076, 695, ..., 1022, 14, 920])\n", + "deriv_tensor (40, 81), indices tensor([ 703, 1076, 695, ..., 1022, 14, 920])\n", + "deriv_tensor (40, 81), indices tensor([ 703, 1076, 695, ..., 1022, 14, 920])\n", + "deriv_tensor (40, 81), indices tensor([ 703, 1076, 695, ..., 1022, 14, 920])\n", + "deriv_tensor (40, 81), indices tensor([2188, 1793, 1296, ..., 1884, 743, 1116])\n", + "deriv_tensor (40, 81), indices tensor([2188, 1793, 1296, ..., 1884, 743, 
1116])\n", + "deriv_tensor (40, 81), indices tensor([2188, 1793, 1296, ..., 1884, 743, 1116])\n", + "deriv_tensor (40, 81), indices tensor([2188, 1793, 1296, ..., 1884, 743, 1116])\n", + "deriv_tensor (40, 81), indices tensor([1144, 161, 1274, ..., 3014, 1967, 2803])\n", + "deriv_tensor (40, 81), indices tensor([1144, 161, 1274, ..., 3014, 1967, 2803])\n", + "deriv_tensor (40, 81), indices tensor([1144, 161, 1274, ..., 3014, 1967, 2803])\n", + "deriv_tensor (40, 81), indices tensor([1144, 161, 1274, ..., 3014, 1967, 2803])\n", + "deriv_tensor (40, 81), indices tensor([ 327, 2630, 3192, ..., 909, 3079, 455])\n", + "deriv_tensor (40, 81), indices tensor([ 327, 2630, 3192, ..., 909, 3079, 455])\n", + "deriv_tensor (40, 81), indices tensor([ 327, 2630, 3192, ..., 909, 3079, 455])\n", + "deriv_tensor (40, 81), indices tensor([ 327, 2630, 3192, ..., 909, 3079, 455])\n", + "deriv_tensor (40, 81), indices tensor([1351, 89, 666, ..., 1039, 567, 2655])\n", + "deriv_tensor (40, 81), indices tensor([1351, 89, 666, ..., 1039, 567, 2655])\n", + "deriv_tensor (40, 81), indices tensor([1351, 89, 666, ..., 1039, 567, 2655])\n", + "deriv_tensor (40, 81), indices tensor([1351, 89, 666, ..., 1039, 567, 2655])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 111, 1520, ..., 2710, 2031, 1138])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 111, 1520, ..., 2710, 2031, 1138])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 111, 1520, ..., 2710, 2031, 1138])\n", + "deriv_tensor (40, 81), indices tensor([ 487, 111, 1520, ..., 2710, 2031, 1138])\n", + "deriv_tensor (40, 81), indices tensor([ 923, 680, 1046, ..., 213, 1328, 2507])\n", + "deriv_tensor (40, 81), indices tensor([ 923, 680, 1046, ..., 213, 1328, 2507])\n", + "deriv_tensor (40, 81), indices tensor([ 923, 680, 1046, ..., 213, 1328, 2507])\n", + "deriv_tensor (40, 81), indices tensor([ 923, 680, 1046, ..., 213, 1328, 2507])\n", + "deriv_tensor (40, 81), indices tensor([ 445, 1999, 197, ..., 550, 2530, 854])\n", + "deriv_tensor (40, 81), indices tensor([ 445, 1999, 197, ..., 550, 2530, 854])\n", + "deriv_tensor (40, 81), indices tensor([ 445, 1999, 197, ..., 550, 2530, 854])\n", + "deriv_tensor (40, 81), indices tensor([ 445, 1999, 197, ..., 550, 2530, 854])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 2798, 311, ..., 384, 3131, 953])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 2798, 311, ..., 384, 3131, 953])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 2798, 311, ..., 384, 3131, 953])\n", + "deriv_tensor (40, 81), indices tensor([ 825, 2798, 311, ..., 384, 3131, 953])\n", + "deriv_tensor (40, 81), indices tensor([ 913, 847, 797, ..., 3233, 2488, 1115])\n", + "deriv_tensor (40, 81), indices tensor([ 913, 847, 797, ..., 3233, 2488, 1115])\n", + "deriv_tensor (40, 81), indices tensor([ 913, 847, 797, ..., 3233, 2488, 1115])\n", + "deriv_tensor (40, 81), indices tensor([ 913, 847, 797, ..., 3233, 2488, 1115])\n", + "deriv_tensor (40, 81), indices tensor([2310, 2865, 2402, ..., 1606, 3201, 2491])\n", + "deriv_tensor (40, 81), indices tensor([2310, 2865, 2402, ..., 1606, 3201, 2491])\n", + "deriv_tensor (40, 81), indices tensor([2310, 2865, 2402, ..., 1606, 3201, 2491])\n", + "deriv_tensor (40, 81), indices tensor([2310, 2865, 2402, ..., 1606, 3201, 2491])\n", + "deriv_tensor (40, 81), indices tensor([1597, 1850, 2755, ..., 1680, 2203, 2590])\n", + "deriv_tensor (40, 81), indices tensor([1597, 1850, 2755, ..., 1680, 2203, 2590])\n", + "deriv_tensor (40, 81), indices tensor([1597, 1850, 2755, ..., 1680, 2203, 2590])\n", + 
"deriv_tensor (40, 81), indices tensor([1597, 1850, 2755, ..., 1680, 2203, 2590])\n", + "deriv_tensor (40, 81), indices tensor([ 968, 2994, 2307, ..., 114, 2979, 1256])\n", + "deriv_tensor (40, 81), indices tensor([ 968, 2994, 2307, ..., 114, 2979, 1256])\n", + "deriv_tensor (40, 81), indices tensor([ 968, 2994, 2307, ..., 114, 2979, 1256])\n", + "deriv_tensor (40, 81), indices tensor([ 968, 2994, 2307, ..., 114, 2979, 1256])\n", + "deriv_tensor (40, 81), indices tensor([3205, 683, 2879, ..., 549, 1395, 2927])\n", + "deriv_tensor (40, 81), indices tensor([3205, 683, 2879, ..., 549, 1395, 2927])\n", + "deriv_tensor (40, 81), indices tensor([3205, 683, 2879, ..., 549, 1395, 2927])\n", + "deriv_tensor (40, 81), indices tensor([3205, 683, 2879, ..., 549, 1395, 2927])\n", + "deriv_tensor (40, 81), indices tensor([2357, 315, 3066, ..., 502, 1688, 739])\n", + "deriv_tensor (40, 81), indices tensor([2357, 315, 3066, ..., 502, 1688, 739])\n", + "deriv_tensor (40, 81), indices tensor([2357, 315, 3066, ..., 502, 1688, 739])\n", + "deriv_tensor (40, 81), indices tensor([2357, 315, 3066, ..., 502, 1688, 739])\n", + "deriv_tensor (40, 81), indices tensor([ 831, 113, 945, ..., 1449, 2359, 3132])\n", + "deriv_tensor (40, 81), indices tensor([ 831, 113, 945, ..., 1449, 2359, 3132])\n", + "deriv_tensor (40, 81), indices tensor([ 831, 113, 945, ..., 1449, 2359, 3132])\n", + "deriv_tensor (40, 81), indices tensor([ 831, 113, 945, ..., 1449, 2359, 3132])\n", + "deriv_tensor (40, 81), indices tensor([2646, 2449, 921, ..., 1116, 2502, 2939])\n", + "deriv_tensor (40, 81), indices tensor([2646, 2449, 921, ..., 1116, 2502, 2939])\n", + "deriv_tensor (40, 81), indices tensor([2646, 2449, 921, ..., 1116, 2502, 2939])\n", + "deriv_tensor (40, 81), indices tensor([2646, 2449, 921, ..., 1116, 2502, 2939])\n", + "deriv_tensor (40, 81), indices tensor([2971, 2567, 838, ..., 2753, 115, 2370])\n", + "deriv_tensor (40, 81), indices tensor([2971, 2567, 838, ..., 2753, 115, 2370])\n", + "deriv_tensor (40, 81), indices tensor([2971, 2567, 838, ..., 2753, 115, 2370])\n", + "deriv_tensor (40, 81), indices tensor([2971, 2567, 838, ..., 2753, 115, 2370])\n", + "deriv_tensor (40, 81), indices tensor([3220, 1186, 2207, ..., 739, 933, 1441])\n", + "deriv_tensor (40, 81), indices tensor([3220, 1186, 2207, ..., 739, 933, 1441])\n", + "deriv_tensor (40, 81), indices tensor([3220, 1186, 2207, ..., 739, 933, 1441])\n", + "deriv_tensor (40, 81), indices tensor([3220, 1186, 2207, ..., 739, 933, 1441])\n", + "deriv_tensor (40, 81), indices tensor([2238, 3222, 563, ..., 1623, 2495, 2411])\n", + "deriv_tensor (40, 81), indices tensor([2238, 3222, 563, ..., 1623, 2495, 2411])\n", + "deriv_tensor (40, 81), indices tensor([2238, 3222, 563, ..., 1623, 2495, 2411])\n", + "deriv_tensor (40, 81), indices tensor([2238, 3222, 563, ..., 1623, 2495, 2411])\n", + "deriv_tensor (40, 81), indices tensor([ 694, 3124, 791, ..., 309, 1109, 754])\n", + "deriv_tensor (40, 81), indices tensor([ 694, 3124, 791, ..., 309, 1109, 754])\n", + "deriv_tensor (40, 81), indices tensor([ 694, 3124, 791, ..., 309, 1109, 754])\n", + "deriv_tensor (40, 81), indices tensor([ 694, 3124, 791, ..., 309, 1109, 754])\n", + "deriv_tensor (40, 81), indices tensor([ 554, 2901, 1192, ..., 2588, 267, 2927])\n", + "deriv_tensor (40, 81), indices tensor([ 554, 2901, 1192, ..., 2588, 267, 2927])\n", + "deriv_tensor (40, 81), indices tensor([ 554, 2901, 1192, ..., 2588, 267, 2927])\n", + "deriv_tensor (40, 81), indices tensor([ 554, 2901, 1192, ..., 2588, 267, 2927])\n", + "deriv_tensor (40, 
81), indices tensor([ 935, 2440, 228, ..., 1709, 958, 1111])\n", + "deriv_tensor (40, 81), indices tensor([ 935, 2440, 228, ..., 1709, 958, 1111])\n", + "deriv_tensor (40, 81), indices tensor([ 935, 2440, 228, ..., 1709, 958, 1111])\n", + "deriv_tensor (40, 81), indices tensor([ 935, 2440, 228, ..., 1709, 958, 1111])\n", + "deriv_tensor (40, 81), indices tensor([1997, 2508, 1414, ..., 3170, 2189, 2177])\n", + "deriv_tensor (40, 81), indices tensor([1997, 2508, 1414, ..., 3170, 2189, 2177])\n", + "deriv_tensor (40, 81), indices tensor([1997, 2508, 1414, ..., 3170, 2189, 2177])\n", + "deriv_tensor (40, 81), indices tensor([1997, 2508, 1414, ..., 3170, 2189, 2177])\n", + "deriv_tensor (40, 81), indices tensor([ 144, 1722, 2841, ..., 553, 2163, 662])\n", + "deriv_tensor (40, 81), indices tensor([ 144, 1722, 2841, ..., 553, 2163, 662])\n", + "deriv_tensor (40, 81), indices tensor([ 144, 1722, 2841, ..., 553, 2163, 662])\n", + "deriv_tensor (40, 81), indices tensor([ 144, 1722, 2841, ..., 553, 2163, 662])\n", + "deriv_tensor (40, 81), indices tensor([2098, 1488, 1926, ..., 2437, 2923, 862])\n", + "deriv_tensor (40, 81), indices tensor([2098, 1488, 1926, ..., 2437, 2923, 862])\n", + "deriv_tensor (40, 81), indices tensor([2098, 1488, 1926, ..., 2437, 2923, 862])\n", + "deriv_tensor (40, 81), indices tensor([2098, 1488, 1926, ..., 2437, 2923, 862])\n", + "deriv_tensor (40, 81), indices tensor([2652, 370, 2600, ..., 216, 1821, 1138])\n", + "deriv_tensor (40, 81), indices tensor([2652, 370, 2600, ..., 216, 1821, 1138])\n", + "deriv_tensor (40, 81), indices tensor([2652, 370, 2600, ..., 216, 1821, 1138])\n", + "deriv_tensor (40, 81), indices tensor([2652, 370, 2600, ..., 216, 1821, 1138])\n", + "deriv_tensor (40, 81), indices tensor([2925, 2239, 2761, ..., 1758, 656, 1454])\n", + "deriv_tensor (40, 81), indices tensor([2925, 2239, 2761, ..., 1758, 656, 1454])\n", + "deriv_tensor (40, 81), indices tensor([2925, 2239, 2761, ..., 1758, 656, 1454])\n", + "deriv_tensor (40, 81), indices tensor([2925, 2239, 2761, ..., 1758, 656, 1454])\n", + "deriv_tensor (40, 81), indices tensor([1173, 833, 1131, ..., 1070, 2167, 1626])\n", + "deriv_tensor (40, 81), indices tensor([1173, 833, 1131, ..., 1070, 2167, 1626])\n", + "deriv_tensor (40, 81), indices tensor([1173, 833, 1131, ..., 1070, 2167, 1626])\n", + "deriv_tensor (40, 81), indices tensor([1173, 833, 1131, ..., 1070, 2167, 1626])\n", + "deriv_tensor (40, 81), indices tensor([ 447, 313, 125, ..., 1454, 1248, 1569])\n", + "deriv_tensor (40, 81), indices tensor([ 447, 313, 125, ..., 1454, 1248, 1569])\n", + "deriv_tensor (40, 81), indices tensor([ 447, 313, 125, ..., 1454, 1248, 1569])\n", + "deriv_tensor (40, 81), indices tensor([ 447, 313, 125, ..., 1454, 1248, 1569])\n", + "deriv_tensor (40, 81), indices tensor([2557, 2780, 3025, ..., 22, 565, 162])\n", + "deriv_tensor (40, 81), indices tensor([2557, 2780, 3025, ..., 22, 565, 162])\n", + "deriv_tensor (40, 81), indices tensor([2557, 2780, 3025, ..., 22, 565, 162])\n", + "deriv_tensor (40, 81), indices tensor([2557, 2780, 3025, ..., 22, 565, 162])\n", + "deriv_tensor (40, 81), indices tensor([ 448, 682, 800, ..., 48, 1879, 363])\n", + "deriv_tensor (40, 81), indices tensor([ 448, 682, 800, ..., 48, 1879, 363])\n", + "deriv_tensor (40, 81), indices tensor([ 448, 682, 800, ..., 48, 1879, 363])\n", + "deriv_tensor (40, 81), indices tensor([ 448, 682, 800, ..., 48, 1879, 363])\n", + "deriv_tensor (40, 81), indices tensor([1446, 717, 841, ..., 2122, 3236, 302])\n", + "deriv_tensor (40, 81), indices tensor([1446, 
717, 841, ..., 2122, 3236, 302])\n", + "deriv_tensor (40, 81), indices tensor([1446, 717, 841, ..., 2122, 3236, 302])\n", + "deriv_tensor (40, 81), indices tensor([1446, 717, 841, ..., 2122, 3236, 302])\n", + "deriv_tensor (40, 81), indices tensor([ 363, 1512, 154, ..., 2478, 1132, 1084])\n", + "deriv_tensor (40, 81), indices tensor([ 363, 1512, 154, ..., 2478, 1132, 1084])\n", + "deriv_tensor (40, 81), indices tensor([ 363, 1512, 154, ..., 2478, 1132, 1084])\n", + "deriv_tensor (40, 81), indices tensor([ 363, 1512, 154, ..., 2478, 1132, 1084])\n", + "deriv_tensor (40, 81), indices tensor([ 33, 1989, 749, ..., 2561, 645, 707])\n", + "deriv_tensor (40, 81), indices tensor([ 33, 1989, 749, ..., 2561, 645, 707])\n", + "deriv_tensor (40, 81), indices tensor([ 33, 1989, 749, ..., 2561, 645, 707])\n", + "deriv_tensor (40, 81), indices tensor([ 33, 1989, 749, ..., 2561, 645, 707])\n", + "deriv_tensor (40, 81), indices tensor([ 580, 2542, 189, ..., 3105, 1799, 2926])\n", + "deriv_tensor (40, 81), indices tensor([ 580, 2542, 189, ..., 3105, 1799, 2926])\n", + "deriv_tensor (40, 81), indices tensor([ 580, 2542, 189, ..., 3105, 1799, 2926])\n", + "deriv_tensor (40, 81), indices tensor([ 580, 2542, 189, ..., 3105, 1799, 2926])\n", + "deriv_tensor (40, 81), indices tensor([1292, 478, 80, ..., 2642, 1523, 1491])\n", + "deriv_tensor (40, 81), indices tensor([1292, 478, 80, ..., 2642, 1523, 1491])\n", + "deriv_tensor (40, 81), indices tensor([1292, 478, 80, ..., 2642, 1523, 1491])\n", + "deriv_tensor (40, 81), indices tensor([1292, 478, 80, ..., 2642, 1523, 1491])\n", + "deriv_tensor (40, 81), indices tensor([1458, 40, 2860, ..., 185, 361, 755])\n", + "deriv_tensor (40, 81), indices tensor([1458, 40, 2860, ..., 185, 361, 755])\n", + "deriv_tensor (40, 81), indices tensor([1458, 40, 2860, ..., 185, 361, 755])\n", + "deriv_tensor (40, 81), indices tensor([1458, 40, 2860, ..., 185, 361, 755])\n", + "deriv_tensor (40, 81), indices tensor([3231, 1951, 1083, ..., 927, 2393, 2029])\n", + "deriv_tensor (40, 81), indices tensor([3231, 1951, 1083, ..., 927, 2393, 2029])\n", + "deriv_tensor (40, 81), indices tensor([3231, 1951, 1083, ..., 927, 2393, 2029])\n", + "deriv_tensor (40, 81), indices tensor([3231, 1951, 1083, ..., 927, 2393, 2029])\n", + "deriv_tensor (40, 81), indices tensor([2207, 3229, 247, ..., 1686, 1854, 657])\n", + "deriv_tensor (40, 81), indices tensor([2207, 3229, 247, ..., 1686, 1854, 657])\n", + "deriv_tensor (40, 81), indices tensor([2207, 3229, 247, ..., 1686, 1854, 657])\n", + "deriv_tensor (40, 81), indices tensor([2207, 3229, 247, ..., 1686, 1854, 657])\n", + "deriv_tensor (40, 81), indices tensor([1352, 2695, 2776, ..., 1139, 1065, 856])\n", + "deriv_tensor (40, 81), indices tensor([1352, 2695, 2776, ..., 1139, 1065, 856])\n", + "deriv_tensor (40, 81), indices tensor([1352, 2695, 2776, ..., 1139, 1065, 856])\n", + "deriv_tensor (40, 81), indices tensor([1352, 2695, 2776, ..., 1139, 1065, 856])\n", + "deriv_tensor (40, 81), indices tensor([ 856, 1739, 1222, ..., 6, 887, 1237])\n", + "deriv_tensor (40, 81), indices tensor([ 856, 1739, 1222, ..., 6, 887, 1237])\n", + "deriv_tensor (40, 81), indices tensor([ 856, 1739, 1222, ..., 6, 887, 1237])\n", + "deriv_tensor (40, 81), indices tensor([ 856, 1739, 1222, ..., 6, 887, 1237])\n", + "deriv_tensor (40, 81), indices tensor([2768, 848, 432, ..., 3077, 2877, 2389])\n", + "deriv_tensor (40, 81), indices tensor([2768, 848, 432, ..., 3077, 2877, 2389])\n", + "deriv_tensor (40, 81), indices tensor([2768, 848, 432, ..., 3077, 2877, 2389])\n", + 
"deriv_tensor (40, 81), indices tensor([2768, 848, 432, ..., 3077, 2877, 2389])\n", + "deriv_tensor (40, 81), indices tensor([1410, 1102, 2190, ..., 1558, 1184, 2971])\n", + "deriv_tensor (40, 81), indices tensor([1410, 1102, 2190, ..., 1558, 1184, 2971])\n", + "deriv_tensor (40, 81), indices tensor([1410, 1102, 2190, ..., 1558, 1184, 2971])\n", + "deriv_tensor (40, 81), indices tensor([1410, 1102, 2190, ..., 1558, 1184, 2971])\n", + "deriv_tensor (40, 81), indices tensor([ 568, 2699, 562, ..., 301, 523, 2100])\n", + "deriv_tensor (40, 81), indices tensor([ 568, 2699, 562, ..., 301, 523, 2100])\n", + "deriv_tensor (40, 81), indices tensor([ 568, 2699, 562, ..., 301, 523, 2100])\n", + "deriv_tensor (40, 81), indices tensor([ 568, 2699, 562, ..., 301, 523, 2100])\n", + "deriv_tensor (40, 81), indices tensor([1937, 2243, 1472, ..., 2696, 3032, 2670])\n", + "deriv_tensor (40, 81), indices tensor([1937, 2243, 1472, ..., 2696, 3032, 2670])\n", + "deriv_tensor (40, 81), indices tensor([1937, 2243, 1472, ..., 2696, 3032, 2670])\n", + "deriv_tensor (40, 81), indices tensor([1937, 2243, 1472, ..., 2696, 3032, 2670])\n", + "deriv_tensor (40, 81), indices tensor([1460, 1178, 510, ..., 370, 688, 2606])\n", + "deriv_tensor (40, 81), indices tensor([1460, 1178, 510, ..., 370, 688, 2606])\n", + "deriv_tensor (40, 81), indices tensor([1460, 1178, 510, ..., 370, 688, 2606])\n", + "deriv_tensor (40, 81), indices tensor([1460, 1178, 510, ..., 370, 688, 2606])\n", + "deriv_tensor (40, 81), indices tensor([2187, 945, 2110, ..., 1384, 1325, 3094])\n", + "deriv_tensor (40, 81), indices tensor([2187, 945, 2110, ..., 1384, 1325, 3094])\n", + "deriv_tensor (40, 81), indices tensor([2187, 945, 2110, ..., 1384, 1325, 3094])\n", + "deriv_tensor (40, 81), indices tensor([2187, 945, 2110, ..., 1384, 1325, 3094])\n", + "deriv_tensor (40, 81), indices tensor([ 422, 2482, 97, ..., 2144, 664, 656])\n", + "deriv_tensor (40, 81), indices tensor([ 422, 2482, 97, ..., 2144, 664, 656])\n", + "deriv_tensor (40, 81), indices tensor([ 422, 2482, 97, ..., 2144, 664, 656])\n", + "deriv_tensor (40, 81), indices tensor([ 422, 2482, 97, ..., 2144, 664, 656])\n", + "deriv_tensor (40, 81), indices tensor([1788, 2406, 1837, ..., 348, 1780, 1824])\n", + "deriv_tensor (40, 81), indices tensor([1788, 2406, 1837, ..., 348, 1780, 1824])\n", + "deriv_tensor (40, 81), indices tensor([1788, 2406, 1837, ..., 348, 1780, 1824])\n", + "deriv_tensor (40, 81), indices tensor([1788, 2406, 1837, ..., 348, 1780, 1824])\n", + "deriv_tensor (40, 81), indices tensor([1905, 1096, 942, ..., 565, 1792, 2683])\n", + "deriv_tensor (40, 81), indices tensor([1905, 1096, 942, ..., 565, 1792, 2683])\n", + "deriv_tensor (40, 81), indices tensor([1905, 1096, 942, ..., 565, 1792, 2683])\n", + "deriv_tensor (40, 81), indices tensor([1905, 1096, 942, ..., 565, 1792, 2683])\n", + "deriv_tensor (40, 81), indices tensor([2162, 2060, 2615, ..., 1899, 1414, 2059])\n", + "deriv_tensor (40, 81), indices tensor([2162, 2060, 2615, ..., 1899, 1414, 2059])\n", + "deriv_tensor (40, 81), indices tensor([2162, 2060, 2615, ..., 1899, 1414, 2059])\n", + "deriv_tensor (40, 81), indices tensor([2162, 2060, 2615, ..., 1899, 1414, 2059])\n", + "deriv_tensor (40, 81), indices tensor([ 418, 1113, 2442, ..., 1064, 1026, 1497])\n", + "deriv_tensor (40, 81), indices tensor([ 418, 1113, 2442, ..., 1064, 1026, 1497])\n", + "deriv_tensor (40, 81), indices tensor([ 418, 1113, 2442, ..., 1064, 1026, 1497])\n", + "deriv_tensor (40, 81), indices tensor([ 418, 1113, 2442, ..., 1064, 1026, 1497])\n", + 
"deriv_tensor (40, 81), indices tensor([2756, 128, 890, ..., 461, 439, 2508])\n", + "deriv_tensor (40, 81), indices tensor([2756, 128, 890, ..., 461, 439, 2508])\n", + "deriv_tensor (40, 81), indices tensor([2756, 128, 890, ..., 461, 439, 2508])\n", + "deriv_tensor (40, 81), indices tensor([2756, 128, 890, ..., 461, 439, 2508])\n", + "deriv_tensor (40, 81), indices tensor([1963, 1572, 1088, ..., 2359, 1369, 746])\n", + "deriv_tensor (40, 81), indices tensor([1963, 1572, 1088, ..., 2359, 1369, 746])\n", + "deriv_tensor (40, 81), indices tensor([1963, 1572, 1088, ..., 2359, 1369, 746])\n", + "deriv_tensor (40, 81), indices tensor([1963, 1572, 1088, ..., 2359, 1369, 746])\n", + "deriv_tensor (40, 81), indices tensor([2218, 652, 2866, ..., 968, 2029, 3146])\n", + "deriv_tensor (40, 81), indices tensor([2218, 652, 2866, ..., 968, 2029, 3146])\n", + "deriv_tensor (40, 81), indices tensor([2218, 652, 2866, ..., 968, 2029, 3146])\n", + "deriv_tensor (40, 81), indices tensor([2218, 652, 2866, ..., 968, 2029, 3146])\n", + "deriv_tensor (40, 81), indices tensor([ 867, 2840, 2995, ..., 3174, 1216, 2315])\n", + "deriv_tensor (40, 81), indices tensor([ 867, 2840, 2995, ..., 3174, 1216, 2315])\n", + "deriv_tensor (40, 81), indices tensor([ 867, 2840, 2995, ..., 3174, 1216, 2315])\n", + "deriv_tensor (40, 81), indices tensor([ 867, 2840, 2995, ..., 3174, 1216, 2315])\n", + "deriv_tensor (40, 81), indices tensor([ 682, 7, 1674, ..., 1562, 2455, 3178])\n", + "deriv_tensor (40, 81), indices tensor([ 682, 7, 1674, ..., 1562, 2455, 3178])\n", + "deriv_tensor (40, 81), indices tensor([ 682, 7, 1674, ..., 1562, 2455, 3178])\n", + "deriv_tensor (40, 81), indices tensor([ 682, 7, 1674, ..., 1562, 2455, 3178])\n", + "deriv_tensor (40, 81), indices tensor([2863, 1982, 1300, ..., 246, 2354, 2502])\n", + "deriv_tensor (40, 81), indices tensor([2863, 1982, 1300, ..., 246, 2354, 2502])\n", + "deriv_tensor (40, 81), indices tensor([2863, 1982, 1300, ..., 246, 2354, 2502])\n", + "deriv_tensor (40, 81), indices tensor([2863, 1982, 1300, ..., 246, 2354, 2502])\n", + "deriv_tensor (40, 81), indices tensor([ 649, 1237, 2992, ..., 2047, 1846, 370])\n", + "deriv_tensor (40, 81), indices tensor([ 649, 1237, 2992, ..., 2047, 1846, 370])\n", + "deriv_tensor (40, 81), indices tensor([ 649, 1237, 2992, ..., 2047, 1846, 370])\n", + "deriv_tensor (40, 81), indices tensor([ 649, 1237, 2992, ..., 2047, 1846, 370])\n", + "deriv_tensor (40, 81), indices tensor([1380, 105, 2942, ..., 18, 31, 3160])\n", + "deriv_tensor (40, 81), indices tensor([1380, 105, 2942, ..., 18, 31, 3160])\n", + "deriv_tensor (40, 81), indices tensor([1380, 105, 2942, ..., 18, 31, 3160])\n", + "deriv_tensor (40, 81), indices tensor([1380, 105, 2942, ..., 18, 31, 3160])\n", + "deriv_tensor (40, 81), indices tensor([ 335, 474, 1612, ..., 6, 907, 690])\n", + "deriv_tensor (40, 81), indices tensor([ 335, 474, 1612, ..., 6, 907, 690])\n", + "deriv_tensor (40, 81), indices tensor([ 335, 474, 1612, ..., 6, 907, 690])\n", + "deriv_tensor (40, 81), indices tensor([ 335, 474, 1612, ..., 6, 907, 690])\n", + "deriv_tensor (40, 81), indices tensor([1926, 2211, 230, ..., 1329, 1182, 1108])\n", + "deriv_tensor (40, 81), indices tensor([1926, 2211, 230, ..., 1329, 1182, 1108])\n", + "deriv_tensor (40, 81), indices tensor([1926, 2211, 230, ..., 1329, 1182, 1108])\n", + "deriv_tensor (40, 81), indices tensor([1926, 2211, 230, ..., 1329, 1182, 1108])\n", + "deriv_tensor (40, 81), indices tensor([2725, 844, 3239, ..., 3005, 2539, 2071])\n", + "deriv_tensor (40, 81), indices 
tensor([2725, 844, 3239, ..., 3005, 2539, 2071])\n", + "deriv_tensor (40, 81), indices tensor([2725, 844, 3239, ..., 3005, 2539, 2071])\n", + "deriv_tensor (40, 81), indices tensor([2725, 844, 3239, ..., 3005, 2539, 2071])\n", + "deriv_tensor (40, 81), indices tensor([1732, 2194, 773, ..., 2956, 1913, 326])\n", + "deriv_tensor (40, 81), indices tensor([1732, 2194, 773, ..., 2956, 1913, 326])\n", + "deriv_tensor (40, 81), indices tensor([1732, 2194, 773, ..., 2956, 1913, 326])\n", + "deriv_tensor (40, 81), indices tensor([1732, 2194, 773, ..., 2956, 1913, 326])\n", + "deriv_tensor (40, 81), indices tensor([ 239, 2832, 499, ..., 2492, 2897, 1208])\n", + "deriv_tensor (40, 81), indices tensor([ 239, 2832, 499, ..., 2492, 2897, 1208])\n", + "deriv_tensor (40, 81), indices tensor([ 239, 2832, 499, ..., 2492, 2897, 1208])\n", + "deriv_tensor (40, 81), indices tensor([ 239, 2832, 499, ..., 2492, 2897, 1208])\n", + "deriv_tensor (40, 81), indices tensor([1292, 2145, 1501, ..., 2833, 487, 27])\n", + "deriv_tensor (40, 81), indices tensor([1292, 2145, 1501, ..., 2833, 487, 27])\n", + "deriv_tensor (40, 81), indices tensor([1292, 2145, 1501, ..., 2833, 487, 27])\n", + "deriv_tensor (40, 81), indices tensor([1292, 2145, 1501, ..., 2833, 487, 27])\n", + "deriv_tensor (40, 81), indices tensor([1159, 1899, 2872, ..., 2246, 118, 1533])\n", + "deriv_tensor (40, 81), indices tensor([1159, 1899, 2872, ..., 2246, 118, 1533])\n", + "deriv_tensor (40, 81), indices tensor([1159, 1899, 2872, ..., 2246, 118, 1533])\n", + "deriv_tensor (40, 81), indices tensor([1159, 1899, 2872, ..., 2246, 118, 1533])\n", + "deriv_tensor (40, 81), indices tensor([ 379, 1341, 2289, ..., 2021, 1926, 76])\n", + "deriv_tensor (40, 81), indices tensor([ 379, 1341, 2289, ..., 2021, 1926, 76])\n", + "deriv_tensor (40, 81), indices tensor([ 379, 1341, 2289, ..., 2021, 1926, 76])\n", + "deriv_tensor (40, 81), indices tensor([ 379, 1341, 2289, ..., 2021, 1926, 76])\n", + "deriv_tensor (40, 81), indices tensor([3098, 3097, 3094, ..., 1484, 1519, 274])\n", + "deriv_tensor (40, 81), indices tensor([3098, 3097, 3094, ..., 1484, 1519, 274])\n", + "deriv_tensor (40, 81), indices tensor([3098, 3097, 3094, ..., 1484, 1519, 274])\n", + "deriv_tensor (40, 81), indices tensor([3098, 3097, 3094, ..., 1484, 1519, 274])\n", + "deriv_tensor (40, 81), indices tensor([ 203, 1629, 1891, ..., 515, 1065, 1967])\n", + "deriv_tensor (40, 81), indices tensor([ 203, 1629, 1891, ..., 515, 1065, 1967])\n", + "deriv_tensor (40, 81), indices tensor([ 203, 1629, 1891, ..., 515, 1065, 1967])\n", + "deriv_tensor (40, 81), indices tensor([ 203, 1629, 1891, ..., 515, 1065, 1967])\n", + "deriv_tensor (40, 81), indices tensor([2049, 877, 368, ..., 2874, 260, 2402])\n", + "deriv_tensor (40, 81), indices tensor([2049, 877, 368, ..., 2874, 260, 2402])\n", + "deriv_tensor (40, 81), indices tensor([2049, 877, 368, ..., 2874, 260, 2402])\n", + "deriv_tensor (40, 81), indices tensor([2049, 877, 368, ..., 2874, 260, 2402])\n", + "deriv_tensor (40, 81), indices tensor([1782, 3098, 2751, ..., 1837, 852, 2254])\n", + "deriv_tensor (40, 81), indices tensor([1782, 3098, 2751, ..., 1837, 852, 2254])\n", + "deriv_tensor (40, 81), indices tensor([1782, 3098, 2751, ..., 1837, 852, 2254])\n", + "deriv_tensor (40, 81), indices tensor([1782, 3098, 2751, ..., 1837, 852, 2254])\n", + "deriv_tensor (40, 81), indices tensor([ 49, 2846, 2651, ..., 1444, 2327, 70])\n", + "deriv_tensor (40, 81), indices tensor([ 49, 2846, 2651, ..., 1444, 2327, 70])\n", + "deriv_tensor (40, 81), indices tensor([ 49, 
2846, 2651, ..., 1444, 2327, 70])\n", + "deriv_tensor (40, 81), indices tensor([ 49, 2846, 2651, ..., 1444, 2327, 70])\n", + "deriv_tensor (40, 81), indices tensor([1489, 2318, 1519, ..., 1200, 1801, 2671])\n", + "deriv_tensor (40, 81), indices tensor([1489, 2318, 1519, ..., 1200, 1801, 2671])\n", + "deriv_tensor (40, 81), indices tensor([1489, 2318, 1519, ..., 1200, 1801, 2671])\n", + "deriv_tensor (40, 81), indices tensor([1489, 2318, 1519, ..., 1200, 1801, 2671])\n", + "deriv_tensor (40, 81), indices tensor([2451, 166, 1075, ..., 1642, 594, 1077])\n", + "deriv_tensor (40, 81), indices tensor([2451, 166, 1075, ..., 1642, 594, 1077])\n", + "deriv_tensor (40, 81), indices tensor([2451, 166, 1075, ..., 1642, 594, 1077])\n", + "deriv_tensor (40, 81), indices tensor([2451, 166, 1075, ..., 1642, 594, 1077])\n", + "deriv_tensor (40, 81), indices tensor([2135, 1218, 2295, ..., 2710, 1483, 2133])\n", + "deriv_tensor (40, 81), indices tensor([2135, 1218, 2295, ..., 2710, 1483, 2133])\n", + "deriv_tensor (40, 81), indices tensor([2135, 1218, 2295, ..., 2710, 1483, 2133])\n", + "deriv_tensor (40, 81), indices tensor([2135, 1218, 2295, ..., 2710, 1483, 2133])\n", + "deriv_tensor (40, 81), indices tensor([2725, 1055, 2983, ..., 2823, 1698, 2442])\n", + "deriv_tensor (40, 81), indices tensor([2725, 1055, 2983, ..., 2823, 1698, 2442])\n", + "deriv_tensor (40, 81), indices tensor([2725, 1055, 2983, ..., 2823, 1698, 2442])\n", + "deriv_tensor (40, 81), indices tensor([2725, 1055, 2983, ..., 2823, 1698, 2442])\n", + "deriv_tensor (40, 81), indices tensor([ 430, 83, 1653, ..., 259, 1771, 2788])\n", + "deriv_tensor (40, 81), indices tensor([ 430, 83, 1653, ..., 259, 1771, 2788])\n", + "deriv_tensor (40, 81), indices tensor([ 430, 83, 1653, ..., 259, 1771, 2788])\n", + "deriv_tensor (40, 81), indices tensor([ 430, 83, 1653, ..., 259, 1771, 2788])\n", + "deriv_tensor (40, 81), indices tensor([2810, 505, 1590, ..., 1224, 2414, 751])\n", + "deriv_tensor (40, 81), indices tensor([2810, 505, 1590, ..., 1224, 2414, 751])\n", + "deriv_tensor (40, 81), indices tensor([2810, 505, 1590, ..., 1224, 2414, 751])\n", + "deriv_tensor (40, 81), indices tensor([2810, 505, 1590, ..., 1224, 2414, 751])\n", + "deriv_tensor (40, 81), indices tensor([2628, 557, 389, ..., 2148, 1084, 2224])\n", + "deriv_tensor (40, 81), indices tensor([2628, 557, 389, ..., 2148, 1084, 2224])\n", + "deriv_tensor (40, 81), indices tensor([2628, 557, 389, ..., 2148, 1084, 2224])\n", + "deriv_tensor (40, 81), indices tensor([2628, 557, 389, ..., 2148, 1084, 2224])\n", + "deriv_tensor (40, 81), indices tensor([1187, 326, 1264, ..., 3021, 2375, 1870])\n", + "deriv_tensor (40, 81), indices tensor([1187, 326, 1264, ..., 3021, 2375, 1870])\n", + "deriv_tensor (40, 81), indices tensor([1187, 326, 1264, ..., 3021, 2375, 1870])\n", + "deriv_tensor (40, 81), indices tensor([1187, 326, 1264, ..., 3021, 2375, 1870])\n", + "deriv_tensor (40, 81), indices tensor([1491, 1379, 3006, ..., 2932, 742, 2258])\n", + "deriv_tensor (40, 81), indices tensor([1491, 1379, 3006, ..., 2932, 742, 2258])\n", + "deriv_tensor (40, 81), indices tensor([1491, 1379, 3006, ..., 2932, 742, 2258])\n", + "deriv_tensor (40, 81), indices tensor([1491, 1379, 3006, ..., 2932, 742, 2258])\n", + "deriv_tensor (40, 81), indices tensor([ 398, 1288, 2966, ..., 2441, 2787, 2852])\n", + "deriv_tensor (40, 81), indices tensor([ 398, 1288, 2966, ..., 2441, 2787, 2852])\n", + "deriv_tensor (40, 81), indices tensor([ 398, 1288, 2966, ..., 2441, 2787, 2852])\n", + "deriv_tensor (40, 81), indices tensor([ 
398, 1288, 2966, ..., 2441, 2787, 2852])\n", + "deriv_tensor (40, 81), indices tensor([1986, 2691, 1770, ..., 1653, 2689, 2409])\n", + "deriv_tensor (40, 81), indices tensor([1986, 2691, 1770, ..., 1653, 2689, 2409])\n", + "deriv_tensor (40, 81), indices tensor([1986, 2691, 1770, ..., 1653, 2689, 2409])\n", + "deriv_tensor (40, 81), indices tensor([1986, 2691, 1770, ..., 1653, 2689, 2409])\n", + "deriv_tensor (40, 81), indices tensor([2424, 1080, 2715, ..., 3089, 181, 3002])\n", + "deriv_tensor (40, 81), indices tensor([2424, 1080, 2715, ..., 3089, 181, 3002])\n", + "deriv_tensor (40, 81), indices tensor([2424, 1080, 2715, ..., 3089, 181, 3002])\n", + "deriv_tensor (40, 81), indices tensor([2424, 1080, 2715, ..., 3089, 181, 3002])\n", + "deriv_tensor (40, 81), indices tensor([1103, 2721, 2451, ..., 2108, 1725, 1618])\n", + "deriv_tensor (40, 81), indices tensor([1103, 2721, 2451, ..., 2108, 1725, 1618])\n", + "deriv_tensor (40, 81), indices tensor([1103, 2721, 2451, ..., 2108, 1725, 1618])\n", + "deriv_tensor (40, 81), indices tensor([1103, 2721, 2451, ..., 2108, 1725, 1618])\n", + "deriv_tensor (40, 81), indices tensor([ 59, 1601, 1818, ..., 1765, 2794, 481])\n", + "deriv_tensor (40, 81), indices tensor([ 59, 1601, 1818, ..., 1765, 2794, 481])\n", + "deriv_tensor (40, 81), indices tensor([ 59, 1601, 1818, ..., 1765, 2794, 481])\n", + "deriv_tensor (40, 81), indices tensor([ 59, 1601, 1818, ..., 1765, 2794, 481])\n", + "deriv_tensor (40, 81), indices tensor([2971, 446, 547, ..., 419, 2737, 2647])\n", + "deriv_tensor (40, 81), indices tensor([2971, 446, 547, ..., 419, 2737, 2647])\n", + "deriv_tensor (40, 81), indices tensor([2971, 446, 547, ..., 419, 2737, 2647])\n", + "deriv_tensor (40, 81), indices tensor([2971, 446, 547, ..., 419, 2737, 2647])\n", + "deriv_tensor (40, 81), indices tensor([2755, 2937, 1044, ..., 3103, 2655, 1014])\n", + "deriv_tensor (40, 81), indices tensor([2755, 2937, 1044, ..., 3103, 2655, 1014])\n", + "deriv_tensor (40, 81), indices tensor([2755, 2937, 1044, ..., 3103, 2655, 1014])\n", + "deriv_tensor (40, 81), indices tensor([2755, 2937, 1044, ..., 3103, 2655, 1014])\n", + "deriv_tensor (40, 81), indices tensor([2783, 547, 2826, ..., 2673, 1755, 1748])\n", + "deriv_tensor (40, 81), indices tensor([2783, 547, 2826, ..., 2673, 1755, 1748])\n", + "deriv_tensor (40, 81), indices tensor([2783, 547, 2826, ..., 2673, 1755, 1748])\n", + "deriv_tensor (40, 81), indices tensor([2783, 547, 2826, ..., 2673, 1755, 1748])\n", + "deriv_tensor (40, 81), indices tensor([1917, 2317, 951, ..., 3154, 823, 1210])\n", + "deriv_tensor (40, 81), indices tensor([1917, 2317, 951, ..., 3154, 823, 1210])\n", + "deriv_tensor (40, 81), indices tensor([1917, 2317, 951, ..., 3154, 823, 1210])\n", + "deriv_tensor (40, 81), indices tensor([1917, 2317, 951, ..., 3154, 823, 1210])\n", + "deriv_tensor (40, 81), indices tensor([ 784, 2076, 721, ..., 2276, 827, 2288])\n", + "deriv_tensor (40, 81), indices tensor([ 784, 2076, 721, ..., 2276, 827, 2288])\n", + "deriv_tensor (40, 81), indices tensor([ 784, 2076, 721, ..., 2276, 827, 2288])\n", + "deriv_tensor (40, 81), indices tensor([ 784, 2076, 721, ..., 2276, 827, 2288])\n", + "deriv_tensor (40, 81), indices tensor([ 528, 1123, 3041, ..., 1600, 200, 3044])\n", + "deriv_tensor (40, 81), indices tensor([ 528, 1123, 3041, ..., 1600, 200, 3044])\n", + "deriv_tensor (40, 81), indices tensor([ 528, 1123, 3041, ..., 1600, 200, 3044])\n", + "deriv_tensor (40, 81), indices tensor([ 528, 1123, 3041, ..., 1600, 200, 3044])\n", + "deriv_tensor (40, 81), indices 
tensor([ 889, 158, 2313, ..., 767, 2835, 723])\n", + "deriv_tensor (40, 81), indices tensor([ 889, 158, 2313, ..., 767, 2835, 723])\n", + "deriv_tensor (40, 81), indices tensor([ 889, 158, 2313, ..., 767, 2835, 723])\n", + "deriv_tensor (40, 81), indices tensor([ 889, 158, 2313, ..., 767, 2835, 723])\n", + "deriv_tensor (40, 81), indices tensor([2226, 2367, 1629, ..., 2048, 3125, 1810])\n", + "deriv_tensor (40, 81), indices tensor([2226, 2367, 1629, ..., 2048, 3125, 1810])\n", + "deriv_tensor (40, 81), indices tensor([2226, 2367, 1629, ..., 2048, 3125, 1810])\n", + "deriv_tensor (40, 81), indices tensor([2226, 2367, 1629, ..., 2048, 3125, 1810])\n", + "deriv_tensor (40, 81), indices tensor([1858, 3112, 2323, ..., 343, 606, 3055])\n", + "deriv_tensor (40, 81), indices tensor([1858, 3112, 2323, ..., 343, 606, 3055])\n", + "deriv_tensor (40, 81), indices tensor([1858, 3112, 2323, ..., 343, 606, 3055])\n", + "deriv_tensor (40, 81), indices tensor([1858, 3112, 2323, ..., 343, 606, 3055])\n", + "deriv_tensor (40, 81), indices tensor([2807, 455, 800, ..., 581, 2506, 343])\n", + "deriv_tensor (40, 81), indices tensor([2807, 455, 800, ..., 581, 2506, 343])\n", + "deriv_tensor (40, 81), indices tensor([2807, 455, 800, ..., 581, 2506, 343])\n", + "deriv_tensor (40, 81), indices tensor([2807, 455, 800, ..., 581, 2506, 343])\n", + "deriv_tensor (40, 81), indices tensor([1052, 962, 2344, ..., 1539, 38, 1723])\n", + "deriv_tensor (40, 81), indices tensor([1052, 962, 2344, ..., 1539, 38, 1723])\n", + "deriv_tensor (40, 81), indices tensor([1052, 962, 2344, ..., 1539, 38, 1723])\n", + "deriv_tensor (40, 81), indices tensor([1052, 962, 2344, ..., 1539, 38, 1723])\n", + "deriv_tensor (40, 81), indices tensor([1853, 2072, 1651, ..., 2424, 115, 1596])\n", + "deriv_tensor (40, 81), indices tensor([1853, 2072, 1651, ..., 2424, 115, 1596])\n", + "deriv_tensor (40, 81), indices tensor([1853, 2072, 1651, ..., 2424, 115, 1596])\n", + "deriv_tensor (40, 81), indices tensor([1853, 2072, 1651, ..., 2424, 115, 1596])\n", + "deriv_tensor (40, 81), indices tensor([1679, 1876, 2216, ..., 2855, 491, 2073])\n", + "deriv_tensor (40, 81), indices tensor([1679, 1876, 2216, ..., 2855, 491, 2073])\n", + "deriv_tensor (40, 81), indices tensor([1679, 1876, 2216, ..., 2855, 491, 2073])\n", + "deriv_tensor (40, 81), indices tensor([1679, 1876, 2216, ..., 2855, 491, 2073])\n", + "deriv_tensor (40, 81), indices tensor([2571, 1712, 3219, ..., 47, 3003, 220])\n", + "deriv_tensor (40, 81), indices tensor([2571, 1712, 3219, ..., 47, 3003, 220])\n", + "deriv_tensor (40, 81), indices tensor([2571, 1712, 3219, ..., 47, 3003, 220])\n", + "deriv_tensor (40, 81), indices tensor([2571, 1712, 3219, ..., 47, 3003, 220])\n", + "deriv_tensor (40, 81), indices tensor([2872, 1496, 816, ..., 673, 2514, 1924])\n", + "deriv_tensor (40, 81), indices tensor([2872, 1496, 816, ..., 673, 2514, 1924])\n", + "deriv_tensor (40, 81), indices tensor([2872, 1496, 816, ..., 673, 2514, 1924])\n", + "deriv_tensor (40, 81), indices tensor([2872, 1496, 816, ..., 673, 2514, 1924])\n", + "deriv_tensor (40, 81), indices tensor([ 884, 1564, 2382, ..., 80, 1153, 3185])\n", + "deriv_tensor (40, 81), indices tensor([ 884, 1564, 2382, ..., 80, 1153, 3185])\n", + "deriv_tensor (40, 81), indices tensor([ 884, 1564, 2382, ..., 80, 1153, 3185])\n", + "deriv_tensor (40, 81), indices tensor([ 884, 1564, 2382, ..., 80, 1153, 3185])\n", + "deriv_tensor (40, 81), indices tensor([3186, 278, 1502, ..., 3043, 2370, 1815])\n", + "deriv_tensor (40, 81), indices tensor([3186, 278, 1502, ..., 
3043, 2370, 1815])\n", + "deriv_tensor (40, 81), indices tensor([3186, 278, 1502, ..., 3043, 2370, 1815])\n", + "deriv_tensor (40, 81), indices tensor([3186, 278, 1502, ..., 3043, 2370, 1815])\n", + "deriv_tensor (40, 81), indices tensor([2317, 793, 228, ..., 35, 2482, 216])\n", + "deriv_tensor (40, 81), indices tensor([2317, 793, 228, ..., 35, 2482, 216])\n", + "deriv_tensor (40, 81), indices tensor([2317, 793, 228, ..., 35, 2482, 216])\n", + "deriv_tensor (40, 81), indices tensor([2317, 793, 228, ..., 35, 2482, 216])\n", + "deriv_tensor (40, 81), indices tensor([1720, 1805, 679, ..., 1856, 1511, 1465])\n", + "deriv_tensor (40, 81), indices tensor([1720, 1805, 679, ..., 1856, 1511, 1465])\n", + "deriv_tensor (40, 81), indices tensor([1720, 1805, 679, ..., 1856, 1511, 1465])\n", + "deriv_tensor (40, 81), indices tensor([1720, 1805, 679, ..., 1856, 1511, 1465])\n", + "deriv_tensor (40, 81), indices tensor([ 314, 417, 1287, ..., 1700, 1984, 759])\n", + "deriv_tensor (40, 81), indices tensor([ 314, 417, 1287, ..., 1700, 1984, 759])\n", + "deriv_tensor (40, 81), indices tensor([ 314, 417, 1287, ..., 1700, 1984, 759])\n", + "deriv_tensor (40, 81), indices tensor([ 314, 417, 1287, ..., 1700, 1984, 759])\n", + "deriv_tensor (40, 81), indices tensor([1962, 1904, 2974, ..., 2463, 1342, 3070])\n", + "deriv_tensor (40, 81), indices tensor([1962, 1904, 2974, ..., 2463, 1342, 3070])\n", + "deriv_tensor (40, 81), indices tensor([1962, 1904, 2974, ..., 2463, 1342, 3070])\n", + "deriv_tensor (40, 81), indices tensor([1962, 1904, 2974, ..., 2463, 1342, 3070])\n", + "deriv_tensor (40, 81), indices tensor([ 140, 3106, 1325, ..., 2782, 1857, 2016])\n", + "deriv_tensor (40, 81), indices tensor([ 140, 3106, 1325, ..., 2782, 1857, 2016])\n", + "deriv_tensor (40, 81), indices tensor([ 140, 3106, 1325, ..., 2782, 1857, 2016])\n", + "deriv_tensor (40, 81), indices tensor([ 140, 3106, 1325, ..., 2782, 1857, 2016])\n", + "deriv_tensor (40, 81), indices tensor([ 340, 405, 2741, ..., 471, 2438, 1869])\n", + "deriv_tensor (40, 81), indices tensor([ 340, 405, 2741, ..., 471, 2438, 1869])\n", + "deriv_tensor (40, 81), indices tensor([ 340, 405, 2741, ..., 471, 2438, 1869])\n", + "deriv_tensor (40, 81), indices tensor([ 340, 405, 2741, ..., 471, 2438, 1869])\n", + "deriv_tensor (40, 81), indices tensor([2697, 1784, 1507, ..., 2825, 1858, 756])\n", + "deriv_tensor (40, 81), indices tensor([2697, 1784, 1507, ..., 2825, 1858, 756])\n", + "deriv_tensor (40, 81), indices tensor([2697, 1784, 1507, ..., 2825, 1858, 756])\n", + "deriv_tensor (40, 81), indices tensor([2697, 1784, 1507, ..., 2825, 1858, 756])\n", + "deriv_tensor (40, 81), indices tensor([2857, 780, 39, ..., 1439, 2605, 1319])\n", + "deriv_tensor (40, 81), indices tensor([2857, 780, 39, ..., 1439, 2605, 1319])\n", + "deriv_tensor (40, 81), indices tensor([2857, 780, 39, ..., 1439, 2605, 1319])\n", + "deriv_tensor (40, 81), indices tensor([2857, 780, 39, ..., 1439, 2605, 1319])\n", + "deriv_tensor (40, 81), indices tensor([3088, 734, 1136, ..., 1507, 117, 2684])\n", + "deriv_tensor (40, 81), indices tensor([3088, 734, 1136, ..., 1507, 117, 2684])\n", + "deriv_tensor (40, 81), indices tensor([3088, 734, 1136, ..., 1507, 117, 2684])\n", + "deriv_tensor (40, 81), indices tensor([3088, 734, 1136, ..., 1507, 117, 2684])\n", + "deriv_tensor (40, 81), indices tensor([1752, 1605, 2179, ..., 2180, 1581, 650])\n", + "deriv_tensor (40, 81), indices tensor([1752, 1605, 2179, ..., 2180, 1581, 650])\n", + "deriv_tensor (40, 81), indices tensor([1752, 1605, 2179, ..., 2180, 1581, 
650])\n", + "deriv_tensor (40, 81), indices tensor([1752, 1605, 2179, ..., 2180, 1581, 650])\n", + "deriv_tensor (40, 81), indices tensor([2435, 496, 428, ..., 121, 1094, 979])\n", + "deriv_tensor (40, 81), indices tensor([2435, 496, 428, ..., 121, 1094, 979])\n", + "deriv_tensor (40, 81), indices tensor([2435, 496, 428, ..., 121, 1094, 979])\n", + "deriv_tensor (40, 81), indices tensor([2435, 496, 428, ..., 121, 1094, 979])\n", + "deriv_tensor (40, 81), indices tensor([2504, 1521, 3124, ..., 449, 1126, 319])\n", + "deriv_tensor (40, 81), indices tensor([2504, 1521, 3124, ..., 449, 1126, 319])\n", + "deriv_tensor (40, 81), indices tensor([2504, 1521, 3124, ..., 449, 1126, 319])\n", + "deriv_tensor (40, 81), indices tensor([2504, 1521, 3124, ..., 449, 1126, 319])\n", + "deriv_tensor (40, 81), indices tensor([ 666, 2502, 2599, ..., 730, 894, 2613])\n", + "deriv_tensor (40, 81), indices tensor([ 666, 2502, 2599, ..., 730, 894, 2613])\n", + "deriv_tensor (40, 81), indices tensor([ 666, 2502, 2599, ..., 730, 894, 2613])\n", + "deriv_tensor (40, 81), indices tensor([ 666, 2502, 2599, ..., 730, 894, 2613])\n", + "deriv_tensor (40, 81), indices tensor([2304, 919, 1232, ..., 2035, 3123, 3154])\n", + "deriv_tensor (40, 81), indices tensor([2304, 919, 1232, ..., 2035, 3123, 3154])\n", + "deriv_tensor (40, 81), indices tensor([2304, 919, 1232, ..., 2035, 3123, 3154])\n", + "deriv_tensor (40, 81), indices tensor([2304, 919, 1232, ..., 2035, 3123, 3154])\n", + "deriv_tensor (40, 81), indices tensor([1203, 1951, 519, ..., 3100, 64, 2439])\n", + "deriv_tensor (40, 81), indices tensor([1203, 1951, 519, ..., 3100, 64, 2439])\n", + "deriv_tensor (40, 81), indices tensor([1203, 1951, 519, ..., 3100, 64, 2439])\n", + "deriv_tensor (40, 81), indices tensor([1203, 1951, 519, ..., 3100, 64, 2439])\n", + "deriv_tensor (40, 81), indices tensor([2982, 1689, 1800, ..., 2589, 518, 2006])\n", + "deriv_tensor (40, 81), indices tensor([2982, 1689, 1800, ..., 2589, 518, 2006])\n", + "deriv_tensor (40, 81), indices tensor([2982, 1689, 1800, ..., 2589, 518, 2006])\n", + "deriv_tensor (40, 81), indices tensor([2982, 1689, 1800, ..., 2589, 518, 2006])\n", + "deriv_tensor (40, 81), indices tensor([2994, 885, 2415, ..., 1821, 2540, 1729])\n", + "deriv_tensor (40, 81), indices tensor([2994, 885, 2415, ..., 1821, 2540, 1729])\n", + "deriv_tensor (40, 81), indices tensor([2994, 885, 2415, ..., 1821, 2540, 1729])\n", + "deriv_tensor (40, 81), indices tensor([2994, 885, 2415, ..., 1821, 2540, 1729])\n", + "deriv_tensor (40, 81), indices tensor([2046, 229, 356, ..., 1612, 318, 541])\n", + "deriv_tensor (40, 81), indices tensor([2046, 229, 356, ..., 1612, 318, 541])\n", + "deriv_tensor (40, 81), indices tensor([2046, 229, 356, ..., 1612, 318, 541])\n", + "deriv_tensor (40, 81), indices tensor([2046, 229, 356, ..., 1612, 318, 541])\n", + "deriv_tensor (40, 81), indices tensor([2619, 546, 1173, ..., 1262, 1874, 632])\n", + "deriv_tensor (40, 81), indices tensor([2619, 546, 1173, ..., 1262, 1874, 632])\n", + "deriv_tensor (40, 81), indices tensor([2619, 546, 1173, ..., 1262, 1874, 632])\n", + "deriv_tensor (40, 81), indices tensor([2619, 546, 1173, ..., 1262, 1874, 632])\n", + "deriv_tensor (40, 81), indices tensor([1195, 2667, 1298, ..., 3201, 3020, 196])\n", + "deriv_tensor (40, 81), indices tensor([1195, 2667, 1298, ..., 3201, 3020, 196])\n", + "deriv_tensor (40, 81), indices tensor([1195, 2667, 1298, ..., 3201, 3020, 196])\n", + "deriv_tensor (40, 81), indices tensor([1195, 2667, 1298, ..., 3201, 3020, 196])\n", + "deriv_tensor (40, 
81), indices tensor([1050, 2212, 1204, ..., 507, 2021, 1471])\n", + "deriv_tensor (40, 81), indices tensor([1050, 2212, 1204, ..., 507, 2021, 1471])\n", + "deriv_tensor (40, 81), indices tensor([1050, 2212, 1204, ..., 507, 2021, 1471])\n", + "deriv_tensor (40, 81), indices tensor([1050, 2212, 1204, ..., 507, 2021, 1471])\n", + "deriv_tensor (40, 81), indices tensor([ 863, 2905, 2605, ..., 130, 1675, 689])\n", + "deriv_tensor (40, 81), indices tensor([ 863, 2905, 2605, ..., 130, 1675, 689])\n", + "deriv_tensor (40, 81), indices tensor([ 863, 2905, 2605, ..., 130, 1675, 689])\n", + "deriv_tensor (40, 81), indices tensor([ 863, 2905, 2605, ..., 130, 1675, 689])\n", + "deriv_tensor (40, 81), indices tensor([ 891, 617, 1085, ..., 1504, 1858, 186])\n", + "deriv_tensor (40, 81), indices tensor([ 891, 617, 1085, ..., 1504, 1858, 186])\n", + "deriv_tensor (40, 81), indices tensor([ 891, 617, 1085, ..., 1504, 1858, 186])\n", + "deriv_tensor (40, 81), indices tensor([ 891, 617, 1085, ..., 1504, 1858, 186])\n", + "deriv_tensor (40, 81), indices tensor([1536, 2415, 2987, ..., 1518, 2818, 499])\n", + "deriv_tensor (40, 81), indices tensor([1536, 2415, 2987, ..., 1518, 2818, 499])\n", + "deriv_tensor (40, 81), indices tensor([1536, 2415, 2987, ..., 1518, 2818, 499])\n", + "deriv_tensor (40, 81), indices tensor([1536, 2415, 2987, ..., 1518, 2818, 499])\n", + "deriv_tensor (40, 81), indices tensor([1288, 1729, 2831, ..., 2455, 1562, 879])\n", + "deriv_tensor (40, 81), indices tensor([1288, 1729, 2831, ..., 2455, 1562, 879])\n", + "deriv_tensor (40, 81), indices tensor([1288, 1729, 2831, ..., 2455, 1562, 879])\n", + "deriv_tensor (40, 81), indices tensor([1288, 1729, 2831, ..., 2455, 1562, 879])\n", + "deriv_tensor (40, 81), indices tensor([2681, 228, 450, ..., 2509, 615, 2396])\n", + "deriv_tensor (40, 81), indices tensor([2681, 228, 450, ..., 2509, 615, 2396])\n", + "deriv_tensor (40, 81), indices tensor([2681, 228, 450, ..., 2509, 615, 2396])\n", + "deriv_tensor (40, 81), indices tensor([2681, 228, 450, ..., 2509, 615, 2396])\n", + "deriv_tensor (40, 81), indices tensor([ 592, 10, 1035, ..., 255, 2744, 2511])\n", + "deriv_tensor (40, 81), indices tensor([ 592, 10, 1035, ..., 255, 2744, 2511])\n", + "deriv_tensor (40, 81), indices tensor([ 592, 10, 1035, ..., 255, 2744, 2511])\n", + "deriv_tensor (40, 81), indices tensor([ 592, 10, 1035, ..., 255, 2744, 2511])\n", + "deriv_tensor (40, 81), indices tensor([2094, 674, 1514, ..., 2510, 2809, 1012])\n", + "deriv_tensor (40, 81), indices tensor([2094, 674, 1514, ..., 2510, 2809, 1012])\n", + "deriv_tensor (40, 81), indices tensor([2094, 674, 1514, ..., 2510, 2809, 1012])\n", + "deriv_tensor (40, 81), indices tensor([2094, 674, 1514, ..., 2510, 2809, 1012])\n", + "deriv_tensor (40, 81), indices tensor([1629, 701, 544, ..., 3221, 1315, 1628])\n", + "deriv_tensor (40, 81), indices tensor([1629, 701, 544, ..., 3221, 1315, 1628])\n", + "deriv_tensor (40, 81), indices tensor([1629, 701, 544, ..., 3221, 1315, 1628])\n", + "deriv_tensor (40, 81), indices tensor([1629, 701, 544, ..., 3221, 1315, 1628])\n", + "deriv_tensor (40, 81), indices tensor([1528, 1247, 1140, ..., 1464, 60, 2479])\n", + "deriv_tensor (40, 81), indices tensor([1528, 1247, 1140, ..., 1464, 60, 2479])\n", + "deriv_tensor (40, 81), indices tensor([1528, 1247, 1140, ..., 1464, 60, 2479])\n", + "deriv_tensor (40, 81), indices tensor([1528, 1247, 1140, ..., 1464, 60, 2479])\n", + "deriv_tensor (40, 81), indices tensor([ 12, 3194, 358, ..., 775, 668, 946])\n", + "deriv_tensor (40, 81), indices tensor([ 
12, 3194, 358, ..., 775, 668, 946])\n", + "deriv_tensor (40, 81), indices tensor([ 12, 3194, 358, ..., 775, 668, 946])\n", + "deriv_tensor (40, 81), indices tensor([ 12, 3194, 358, ..., 775, 668, 946])\n", + "deriv_tensor (40, 81), indices tensor([1054, 1099, 559, ..., 1405, 1512, 23])\n", + "deriv_tensor (40, 81), indices tensor([1054, 1099, 559, ..., 1405, 1512, 23])\n", + "deriv_tensor (40, 81), indices tensor([1054, 1099, 559, ..., 1405, 1512, 23])\n", + "deriv_tensor (40, 81), indices tensor([1054, 1099, 559, ..., 1405, 1512, 23])\n", + "deriv_tensor (40, 81), indices tensor([3008, 948, 630, ..., 2214, 2128, 68])\n", + "deriv_tensor (40, 81), indices tensor([3008, 948, 630, ..., 2214, 2128, 68])\n", + "deriv_tensor (40, 81), indices tensor([3008, 948, 630, ..., 2214, 2128, 68])\n", + "deriv_tensor (40, 81), indices tensor([3008, 948, 630, ..., 2214, 2128, 68])\n", + "deriv_tensor (40, 81), indices tensor([1266, 1265, 306, ..., 1517, 2050, 1610])\n", + "deriv_tensor (40, 81), indices tensor([1266, 1265, 306, ..., 1517, 2050, 1610])\n", + "deriv_tensor (40, 81), indices tensor([1266, 1265, 306, ..., 1517, 2050, 1610])\n", + "deriv_tensor (40, 81), indices tensor([1266, 1265, 306, ..., 1517, 2050, 1610])\n", + "deriv_tensor (40, 81), indices tensor([ 686, 1084, 1503, ..., 2849, 3235, 2789])\n", + "deriv_tensor (40, 81), indices tensor([ 686, 1084, 1503, ..., 2849, 3235, 2789])\n", + "deriv_tensor (40, 81), indices tensor([ 686, 1084, 1503, ..., 2849, 3235, 2789])\n", + "deriv_tensor (40, 81), indices tensor([ 686, 1084, 1503, ..., 2849, 3235, 2789])\n", + "deriv_tensor (40, 81), indices tensor([1673, 733, 2544, ..., 2717, 2147, 1213])\n", + "deriv_tensor (40, 81), indices tensor([1673, 733, 2544, ..., 2717, 2147, 1213])\n", + "deriv_tensor (40, 81), indices tensor([1673, 733, 2544, ..., 2717, 2147, 1213])\n", + "deriv_tensor (40, 81), indices tensor([1673, 733, 2544, ..., 2717, 2147, 1213])\n", + "deriv_tensor (40, 81), indices tensor([ 849, 2421, 2600, ..., 3236, 200, 1943])\n", + "deriv_tensor (40, 81), indices tensor([ 849, 2421, 2600, ..., 3236, 200, 1943])\n", + "deriv_tensor (40, 81), indices tensor([ 849, 2421, 2600, ..., 3236, 200, 1943])\n", + "deriv_tensor (40, 81), indices tensor([ 849, 2421, 2600, ..., 3236, 200, 1943])\n", + "deriv_tensor (40, 81), indices tensor([2372, 1064, 2412, ..., 2139, 700, 2839])\n", + "deriv_tensor (40, 81), indices tensor([2372, 1064, 2412, ..., 2139, 700, 2839])\n", + "deriv_tensor (40, 81), indices tensor([2372, 1064, 2412, ..., 2139, 700, 2839])\n", + "deriv_tensor (40, 81), indices tensor([2372, 1064, 2412, ..., 2139, 700, 2839])\n", + "deriv_tensor (40, 81), indices tensor([ 702, 2400, 879, ..., 1098, 2314, 2782])\n", + "deriv_tensor (40, 81), indices tensor([ 702, 2400, 879, ..., 1098, 2314, 2782])\n", + "deriv_tensor (40, 81), indices tensor([ 702, 2400, 879, ..., 1098, 2314, 2782])\n", + "deriv_tensor (40, 81), indices tensor([ 702, 2400, 879, ..., 1098, 2314, 2782])\n", + "deriv_tensor (40, 81), indices tensor([1062, 625, 1037, ..., 149, 2593, 172])\n", + "deriv_tensor (40, 81), indices tensor([1062, 625, 1037, ..., 149, 2593, 172])\n", + "deriv_tensor (40, 81), indices tensor([1062, 625, 1037, ..., 149, 2593, 172])\n", + "deriv_tensor (40, 81), indices tensor([1062, 625, 1037, ..., 149, 2593, 172])\n", + "deriv_tensor (40, 81), indices tensor([2737, 1382, 167, ..., 1701, 1749, 183])\n", + "deriv_tensor (40, 81), indices tensor([2737, 1382, 167, ..., 1701, 1749, 183])\n", + "deriv_tensor (40, 81), indices tensor([2737, 1382, 167, ..., 
1701, 1749, 183])\n", + "deriv_tensor (40, 81), indices tensor([2737, 1382, 167, ..., 1701, 1749, 183])\n", + "deriv_tensor (40, 81), indices tensor([3035, 1043, 1012, ..., 1641, 3131, 1927])\n", + "deriv_tensor (40, 81), indices tensor([3035, 1043, 1012, ..., 1641, 3131, 1927])\n", + "deriv_tensor (40, 81), indices tensor([3035, 1043, 1012, ..., 1641, 3131, 1927])\n", + "deriv_tensor (40, 81), indices tensor([3035, 1043, 1012, ..., 1641, 3131, 1927])\n", + "deriv_tensor (40, 81), indices tensor([2980, 1780, 2790, ..., 1518, 3195, 651])\n", + "deriv_tensor (40, 81), indices tensor([2980, 1780, 2790, ..., 1518, 3195, 651])\n", + "deriv_tensor (40, 81), indices tensor([2980, 1780, 2790, ..., 1518, 3195, 651])\n", + "deriv_tensor (40, 81), indices tensor([2980, 1780, 2790, ..., 1518, 3195, 651])\n", + "deriv_tensor (40, 81), indices tensor([ 588, 2335, 950, ..., 849, 2727, 981])\n", + "deriv_tensor (40, 81), indices tensor([ 588, 2335, 950, ..., 849, 2727, 981])\n", + "deriv_tensor (40, 81), indices tensor([ 588, 2335, 950, ..., 849, 2727, 981])\n", + "deriv_tensor (40, 81), indices tensor([ 588, 2335, 950, ..., 849, 2727, 981])\n", + "deriv_tensor (40, 81), indices tensor([2792, 2112, 462, ..., 1227, 1388, 1315])\n", + "deriv_tensor (40, 81), indices tensor([2792, 2112, 462, ..., 1227, 1388, 1315])\n", + "deriv_tensor (40, 81), indices tensor([2792, 2112, 462, ..., 1227, 1388, 1315])\n", + "deriv_tensor (40, 81), indices tensor([2792, 2112, 462, ..., 1227, 1388, 1315])\n", + "deriv_tensor (40, 81), indices tensor([1758, 133, 1267, ..., 600, 2040, 858])\n", + "deriv_tensor (40, 81), indices tensor([1758, 133, 1267, ..., 600, 2040, 858])\n", + "deriv_tensor (40, 81), indices tensor([1758, 133, 1267, ..., 600, 2040, 858])\n", + "deriv_tensor (40, 81), indices tensor([1758, 133, 1267, ..., 600, 2040, 858])\n", + "deriv_tensor (40, 81), indices tensor([1103, 3128, 10, ..., 1599, 1673, 2523])\n", + "deriv_tensor (40, 81), indices tensor([1103, 3128, 10, ..., 1599, 1673, 2523])\n", + "deriv_tensor (40, 81), indices tensor([1103, 3128, 10, ..., 1599, 1673, 2523])\n", + "deriv_tensor (40, 81), indices tensor([1103, 3128, 10, ..., 1599, 1673, 2523])\n", + "deriv_tensor (40, 81), indices tensor([1262, 2641, 1708, ..., 550, 1903, 2915])\n", + "deriv_tensor (40, 81), indices tensor([1262, 2641, 1708, ..., 550, 1903, 2915])\n", + "deriv_tensor (40, 81), indices tensor([1262, 2641, 1708, ..., 550, 1903, 2915])\n", + "deriv_tensor (40, 81), indices tensor([1262, 2641, 1708, ..., 550, 1903, 2915])\n", + "deriv_tensor (40, 81), indices tensor([2640, 2331, 116, ..., 972, 3197, 2850])\n", + "deriv_tensor (40, 81), indices tensor([2640, 2331, 116, ..., 972, 3197, 2850])\n", + "deriv_tensor (40, 81), indices tensor([2640, 2331, 116, ..., 972, 3197, 2850])\n", + "deriv_tensor (40, 81), indices tensor([2640, 2331, 116, ..., 972, 3197, 2850])\n", + "deriv_tensor (40, 81), indices tensor([1626, 2631, 2003, ..., 1799, 1008, 2201])\n", + "deriv_tensor (40, 81), indices tensor([1626, 2631, 2003, ..., 1799, 1008, 2201])\n", + "deriv_tensor (40, 81), indices tensor([1626, 2631, 2003, ..., 1799, 1008, 2201])\n", + "deriv_tensor (40, 81), indices tensor([1626, 2631, 2003, ..., 1799, 1008, 2201])\n", + "deriv_tensor (40, 81), indices tensor([1017, 2341, 3119, ..., 700, 2635, 759])\n", + "deriv_tensor (40, 81), indices tensor([1017, 2341, 3119, ..., 700, 2635, 759])\n", + "deriv_tensor (40, 81), indices tensor([1017, 2341, 3119, ..., 700, 2635, 759])\n", + "deriv_tensor (40, 81), indices tensor([1017, 2341, 3119, ..., 700, 
2635, 759])\n", + "deriv_tensor (40, 81), indices tensor([ 296, 651, 846, ..., 1302, 1208, 2160])\n", + "deriv_tensor (40, 81), indices tensor([ 296, 651, 846, ..., 1302, 1208, 2160])\n", + "deriv_tensor (40, 81), indices tensor([ 296, 651, 846, ..., 1302, 1208, 2160])\n", + "deriv_tensor (40, 81), indices tensor([ 296, 651, 846, ..., 1302, 1208, 2160])\n", + "deriv_tensor (40, 81), indices tensor([2892, 1972, 3102, ..., 635, 2099, 418])\n", + "deriv_tensor (40, 81), indices tensor([2892, 1972, 3102, ..., 635, 2099, 418])\n", + "deriv_tensor (40, 81), indices tensor([2892, 1972, 3102, ..., 635, 2099, 418])\n", + "deriv_tensor (40, 81), indices tensor([2892, 1972, 3102, ..., 635, 2099, 418])\n", + "deriv_tensor (40, 81), indices tensor([ 294, 1019, 1768, ..., 934, 1099, 2129])\n", + "deriv_tensor (40, 81), indices tensor([ 294, 1019, 1768, ..., 934, 1099, 2129])\n", + "deriv_tensor (40, 81), indices tensor([ 294, 1019, 1768, ..., 934, 1099, 2129])\n", + "deriv_tensor (40, 81), indices tensor([ 294, 1019, 1768, ..., 934, 1099, 2129])\n", + "deriv_tensor (40, 81), indices tensor([ 826, 1450, 2303, ..., 1383, 1151, 2223])\n", + "deriv_tensor (40, 81), indices tensor([ 826, 1450, 2303, ..., 1383, 1151, 2223])\n", + "deriv_tensor (40, 81), indices tensor([ 826, 1450, 2303, ..., 1383, 1151, 2223])\n", + "deriv_tensor (40, 81), indices tensor([ 826, 1450, 2303, ..., 1383, 1151, 2223])\n", + "deriv_tensor (40, 81), indices tensor([2577, 2723, 2324, ..., 799, 3140, 2067])\n", + "deriv_tensor (40, 81), indices tensor([2577, 2723, 2324, ..., 799, 3140, 2067])\n", + "deriv_tensor (40, 81), indices tensor([2577, 2723, 2324, ..., 799, 3140, 2067])\n", + "deriv_tensor (40, 81), indices tensor([2577, 2723, 2324, ..., 799, 3140, 2067])\n", + "deriv_tensor (40, 81), indices tensor([1883, 2030, 3101, ..., 701, 3115, 650])\n", + "deriv_tensor (40, 81), indices tensor([1883, 2030, 3101, ..., 701, 3115, 650])\n", + "deriv_tensor (40, 81), indices tensor([1883, 2030, 3101, ..., 701, 3115, 650])\n", + "deriv_tensor (40, 81), indices tensor([1883, 2030, 3101, ..., 701, 3115, 650])\n", + "deriv_tensor (40, 81), indices tensor([ 893, 1754, 2858, ..., 2127, 2044, 2584])\n", + "deriv_tensor (40, 81), indices tensor([ 893, 1754, 2858, ..., 2127, 2044, 2584])\n", + "deriv_tensor (40, 81), indices tensor([ 893, 1754, 2858, ..., 2127, 2044, 2584])\n", + "deriv_tensor (40, 81), indices tensor([ 893, 1754, 2858, ..., 2127, 2044, 2584])\n", + "deriv_tensor (40, 81), indices tensor([3125, 975, 1853, ..., 1972, 3134, 3034])\n", + "deriv_tensor (40, 81), indices tensor([3125, 975, 1853, ..., 1972, 3134, 3034])\n", + "deriv_tensor (40, 81), indices tensor([3125, 975, 1853, ..., 1972, 3134, 3034])\n", + "deriv_tensor (40, 81), indices tensor([3125, 975, 1853, ..., 1972, 3134, 3034])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 1172, 1405, ..., 2259, 316, 17])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 1172, 1405, ..., 2259, 316, 17])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 1172, 1405, ..., 2259, 316, 17])\n", + "deriv_tensor (40, 81), indices tensor([ 48, 1172, 1405, ..., 2259, 316, 17])\n", + "deriv_tensor (40, 81), indices tensor([1237, 1406, 2979, ..., 709, 1468, 1925])\n", + "deriv_tensor (40, 81), indices tensor([1237, 1406, 2979, ..., 709, 1468, 1925])\n", + "deriv_tensor (40, 81), indices tensor([1237, 1406, 2979, ..., 709, 1468, 1925])\n", + "deriv_tensor (40, 81), indices tensor([1237, 1406, 2979, ..., 709, 1468, 1925])\n", + "deriv_tensor (40, 81), indices tensor([ 289, 2829, 610, ..., 2857, 
2468, 1200])\n", + "deriv_tensor (40, 81), indices tensor([ 289, 2829, 610, ..., 2857, 2468, 1200])\n", + "deriv_tensor (40, 81), indices tensor([ 289, 2829, 610, ..., 2857, 2468, 1200])\n", + "deriv_tensor (40, 81), indices tensor([ 289, 2829, 610, ..., 2857, 2468, 1200])\n", + "deriv_tensor (40, 81), indices tensor([ 957, 2386, 1810, ..., 1276, 2936, 3109])\n", + "deriv_tensor (40, 81), indices tensor([ 957, 2386, 1810, ..., 1276, 2936, 3109])\n", + "deriv_tensor (40, 81), indices tensor([ 957, 2386, 1810, ..., 1276, 2936, 3109])\n", + "deriv_tensor (40, 81), indices tensor([ 957, 2386, 1810, ..., 1276, 2936, 3109])\n", + "deriv_tensor (40, 81), indices tensor([ 565, 2838, 3000, ..., 1528, 1546, 2674])\n", + "deriv_tensor (40, 81), indices tensor([ 565, 2838, 3000, ..., 1528, 1546, 2674])\n", + "deriv_tensor (40, 81), indices tensor([ 565, 2838, 3000, ..., 1528, 1546, 2674])\n", + "deriv_tensor (40, 81), indices tensor([ 565, 2838, 3000, ..., 1528, 1546, 2674])\n", + "deriv_tensor (40, 81), indices tensor([3049, 2143, 2117, ..., 892, 2652, 1567])\n", + "deriv_tensor (40, 81), indices tensor([3049, 2143, 2117, ..., 892, 2652, 1567])\n", + "deriv_tensor (40, 81), indices tensor([3049, 2143, 2117, ..., 892, 2652, 1567])\n", + "deriv_tensor (40, 81), indices tensor([3049, 2143, 2117, ..., 892, 2652, 1567])\n", + "deriv_tensor (40, 81), indices tensor([1773, 291, 498, ..., 1640, 963, 2329])\n", + "deriv_tensor (40, 81), indices tensor([1773, 291, 498, ..., 1640, 963, 2329])\n", + "deriv_tensor (40, 81), indices tensor([1773, 291, 498, ..., 1640, 963, 2329])\n", + "deriv_tensor (40, 81), indices tensor([1773, 291, 498, ..., 1640, 963, 2329])\n", + "deriv_tensor (40, 81), indices tensor([1123, 1891, 2368, ..., 977, 1401, 760])\n", + "deriv_tensor (40, 81), indices tensor([1123, 1891, 2368, ..., 977, 1401, 760])\n", + "deriv_tensor (40, 81), indices tensor([1123, 1891, 2368, ..., 977, 1401, 760])\n", + "deriv_tensor (40, 81), indices tensor([1123, 1891, 2368, ..., 977, 1401, 760])\n", + "deriv_tensor (40, 81), indices tensor([ 194, 443, 2166, ..., 498, 1532, 469])\n", + "deriv_tensor (40, 81), indices tensor([ 194, 443, 2166, ..., 498, 1532, 469])\n", + "deriv_tensor (40, 81), indices tensor([ 194, 443, 2166, ..., 498, 1532, 469])\n", + "deriv_tensor (40, 81), indices tensor([ 194, 443, 2166, ..., 498, 1532, 469])\n", + "deriv_tensor (40, 81), indices tensor([ 250, 187, 2998, ..., 3218, 2403, 2949])\n", + "deriv_tensor (40, 81), indices tensor([ 250, 187, 2998, ..., 3218, 2403, 2949])\n", + "deriv_tensor (40, 81), indices tensor([ 250, 187, 2998, ..., 3218, 2403, 2949])\n", + "deriv_tensor (40, 81), indices tensor([ 250, 187, 2998, ..., 3218, 2403, 2949])\n", + "deriv_tensor (40, 81), indices tensor([1956, 1666, 1852, ..., 305, 469, 761])\n", + "deriv_tensor (40, 81), indices tensor([1956, 1666, 1852, ..., 305, 469, 761])\n", + "deriv_tensor (40, 81), indices tensor([1956, 1666, 1852, ..., 305, 469, 761])\n", + "deriv_tensor (40, 81), indices tensor([1956, 1666, 1852, ..., 305, 469, 761])\n", + "deriv_tensor (40, 81), indices tensor([1777, 637, 1413, ..., 2980, 345, 1395])\n", + "deriv_tensor (40, 81), indices tensor([1777, 637, 1413, ..., 2980, 345, 1395])\n", + "deriv_tensor (40, 81), indices tensor([1777, 637, 1413, ..., 2980, 345, 1395])\n", + "deriv_tensor (40, 81), indices tensor([1777, 637, 1413, ..., 2980, 345, 1395])\n", + "deriv_tensor (40, 81), indices tensor([1289, 2008, 2460, ..., 1790, 645, 2598])\n", + "deriv_tensor (40, 81), indices tensor([1289, 2008, 2460, ..., 1790, 645, 
+ "deriv_tensor (40, 81), indices tensor([1289, 2008, 2460, ..., 1790, 645, 2598])\n",
+ "[... several hundred further 'deriv_tensor (40, 81), indices tensor([...])' output lines elided: each randomly drawn index tensor over the 40 x 81 grid (3240 points) is reported four times in a row before the next draw ...]\n",
"deriv_tensor (40, 81), indices tensor([1051, 611, 1163, ..., 1053, 2349, 528])\n", + "deriv_tensor (40, 81), indices tensor([1051, 611, 1163, ..., 1053, 2349, 528])\n", + "deriv_tensor (40, 81), indices tensor([1051, 611, 1163, ..., 1053, 2349, 528])\n", + "deriv_tensor (40, 81), indices tensor([1051, 611, 1163, ..., 1053, 2349, 528])\n", + "deriv_tensor (40, 81), indices tensor([1210, 1069, 2496, ..., 2282, 234, 1564])\n", + "deriv_tensor (40, 81), indices tensor([1210, 1069, 2496, ..., 2282, 234, 1564])\n", + "deriv_tensor (40, 81), indices tensor([1210, 1069, 2496, ..., 2282, 234, 1564])\n", + "deriv_tensor (40, 81), indices tensor([1210, 1069, 2496, ..., 2282, 234, 1564])\n", + "deriv_tensor (40, 81), indices tensor([1907, 33, 2677, ..., 3235, 940, 1988])\n", + "deriv_tensor (40, 81), indices tensor([1907, 33, 2677, ..., 3235, 940, 1988])\n", + "deriv_tensor (40, 81), indices tensor([1907, 33, 2677, ..., 3235, 940, 1988])\n", + "deriv_tensor (40, 81), indices tensor([1907, 33, 2677, ..., 3235, 940, 1988])\n", + "deriv_tensor (40, 81), indices tensor([3131, 214, 1566, ..., 751, 1838, 1917])\n", + "deriv_tensor (40, 81), indices tensor([3131, 214, 1566, ..., 751, 1838, 1917])\n", + "deriv_tensor (40, 81), indices tensor([3131, 214, 1566, ..., 751, 1838, 1917])\n", + "deriv_tensor (40, 81), indices tensor([3131, 214, 1566, ..., 751, 1838, 1917])\n", + "deriv_tensor (40, 81), indices tensor([2469, 1850, 1651, ..., 1360, 1743, 1492])\n", + "deriv_tensor (40, 81), indices tensor([2469, 1850, 1651, ..., 1360, 1743, 1492])\n", + "deriv_tensor (40, 81), indices tensor([2469, 1850, 1651, ..., 1360, 1743, 1492])\n", + "deriv_tensor (40, 81), indices tensor([2469, 1850, 1651, ..., 1360, 1743, 1492])\n", + "deriv_tensor (40, 81), indices tensor([ 708, 2545, 873, ..., 1626, 1217, 2256])\n", + "deriv_tensor (40, 81), indices tensor([ 708, 2545, 873, ..., 1626, 1217, 2256])\n", + "deriv_tensor (40, 81), indices tensor([ 708, 2545, 873, ..., 1626, 1217, 2256])\n", + "deriv_tensor (40, 81), indices tensor([ 708, 2545, 873, ..., 1626, 1217, 2256])\n", + "deriv_tensor (40, 81), indices tensor([2065, 1093, 2960, ..., 1317, 219, 1608])\n", + "deriv_tensor (40, 81), indices tensor([2065, 1093, 2960, ..., 1317, 219, 1608])\n", + "deriv_tensor (40, 81), indices tensor([2065, 1093, 2960, ..., 1317, 219, 1608])\n", + "deriv_tensor (40, 81), indices tensor([2065, 1093, 2960, ..., 1317, 219, 1608])\n", + "deriv_tensor (40, 81), indices tensor([ 164, 842, 837, ..., 1775, 2376, 590])\n", + "deriv_tensor (40, 81), indices tensor([ 164, 842, 837, ..., 1775, 2376, 590])\n", + "deriv_tensor (40, 81), indices tensor([ 164, 842, 837, ..., 1775, 2376, 590])\n", + "deriv_tensor (40, 81), indices tensor([ 164, 842, 837, ..., 1775, 2376, 590])\n", + "deriv_tensor (40, 81), indices tensor([1345, 2263, 2616, ..., 531, 1733, 2340])\n", + "deriv_tensor (40, 81), indices tensor([1345, 2263, 2616, ..., 531, 1733, 2340])\n", + "deriv_tensor (40, 81), indices tensor([1345, 2263, 2616, ..., 531, 1733, 2340])\n", + "deriv_tensor (40, 81), indices tensor([1345, 2263, 2616, ..., 531, 1733, 2340])\n", + "deriv_tensor (40, 81), indices tensor([ 325, 852, 1386, ..., 2163, 494, 3140])\n", + "deriv_tensor (40, 81), indices tensor([ 325, 852, 1386, ..., 2163, 494, 3140])\n", + "deriv_tensor (40, 81), indices tensor([ 325, 852, 1386, ..., 2163, 494, 3140])\n", + "deriv_tensor (40, 81), indices tensor([ 325, 852, 1386, ..., 2163, 494, 3140])\n", + "deriv_tensor (40, 81), indices tensor([ 383, 1075, 3236, ..., 1550, 1891, 2336])\n", + 
"deriv_tensor (40, 81), indices tensor([ 383, 1075, 3236, ..., 1550, 1891, 2336])\n", + "deriv_tensor (40, 81), indices tensor([ 383, 1075, 3236, ..., 1550, 1891, 2336])\n", + "deriv_tensor (40, 81), indices tensor([ 383, 1075, 3236, ..., 1550, 1891, 2336])\n", + "deriv_tensor (40, 81), indices tensor([ 404, 933, 1543, ..., 278, 609, 229])\n", + "deriv_tensor (40, 81), indices tensor([ 404, 933, 1543, ..., 278, 609, 229])\n", + "deriv_tensor (40, 81), indices tensor([ 404, 933, 1543, ..., 278, 609, 229])\n", + "deriv_tensor (40, 81), indices tensor([ 404, 933, 1543, ..., 278, 609, 229])\n", + "deriv_tensor (40, 81), indices tensor([1102, 1531, 3153, ..., 2437, 1252, 1681])\n", + "deriv_tensor (40, 81), indices tensor([1102, 1531, 3153, ..., 2437, 1252, 1681])\n", + "deriv_tensor (40, 81), indices tensor([1102, 1531, 3153, ..., 2437, 1252, 1681])\n", + "deriv_tensor (40, 81), indices tensor([1102, 1531, 3153, ..., 2437, 1252, 1681])\n", + "deriv_tensor (40, 81), indices tensor([ 809, 2286, 2653, ..., 2864, 368, 2758])\n", + "deriv_tensor (40, 81), indices tensor([ 809, 2286, 2653, ..., 2864, 368, 2758])\n", + "deriv_tensor (40, 81), indices tensor([ 809, 2286, 2653, ..., 2864, 368, 2758])\n", + "deriv_tensor (40, 81), indices tensor([ 809, 2286, 2653, ..., 2864, 368, 2758])\n", + "deriv_tensor (40, 81), indices tensor([ 425, 848, 1830, ..., 271, 864, 2918])\n", + "deriv_tensor (40, 81), indices tensor([ 425, 848, 1830, ..., 271, 864, 2918])\n", + "deriv_tensor (40, 81), indices tensor([ 425, 848, 1830, ..., 271, 864, 2918])\n", + "deriv_tensor (40, 81), indices tensor([ 425, 848, 1830, ..., 271, 864, 2918])\n", + "deriv_tensor (40, 81), indices tensor([ 616, 2216, 3112, ..., 1154, 2021, 1263])\n", + "deriv_tensor (40, 81), indices tensor([ 616, 2216, 3112, ..., 1154, 2021, 1263])\n", + "deriv_tensor (40, 81), indices tensor([ 616, 2216, 3112, ..., 1154, 2021, 1263])\n", + "deriv_tensor (40, 81), indices tensor([ 616, 2216, 3112, ..., 1154, 2021, 1263])\n", + "deriv_tensor (40, 81), indices tensor([2253, 210, 3093, ..., 344, 2439, 412])\n", + "deriv_tensor (40, 81), indices tensor([2253, 210, 3093, ..., 344, 2439, 412])\n", + "deriv_tensor (40, 81), indices tensor([2253, 210, 3093, ..., 344, 2439, 412])\n", + "deriv_tensor (40, 81), indices tensor([2253, 210, 3093, ..., 344, 2439, 412])\n", + "deriv_tensor (40, 81), indices tensor([1583, 1637, 2644, ..., 496, 2307, 1964])\n", + "deriv_tensor (40, 81), indices tensor([1583, 1637, 2644, ..., 496, 2307, 1964])\n", + "deriv_tensor (40, 81), indices tensor([1583, 1637, 2644, ..., 496, 2307, 1964])\n", + "deriv_tensor (40, 81), indices tensor([1583, 1637, 2644, ..., 496, 2307, 1964])\n", + "deriv_tensor (40, 81), indices tensor([1549, 2634, 1072, ..., 435, 854, 1321])\n", + "deriv_tensor (40, 81), indices tensor([1549, 2634, 1072, ..., 435, 854, 1321])\n", + "deriv_tensor (40, 81), indices tensor([1549, 2634, 1072, ..., 435, 854, 1321])\n", + "deriv_tensor (40, 81), indices tensor([1549, 2634, 1072, ..., 435, 854, 1321])\n", + "deriv_tensor (40, 81), indices tensor([2146, 2048, 2941, ..., 1243, 2766, 2760])\n", + "deriv_tensor (40, 81), indices tensor([2146, 2048, 2941, ..., 1243, 2766, 2760])\n", + "deriv_tensor (40, 81), indices tensor([2146, 2048, 2941, ..., 1243, 2766, 2760])\n", + "deriv_tensor (40, 81), indices tensor([2146, 2048, 2941, ..., 1243, 2766, 2760])\n", + "deriv_tensor (40, 81), indices tensor([2399, 712, 271, ..., 1376, 451, 1108])\n", + "deriv_tensor (40, 81), indices tensor([2399, 712, 271, ..., 1376, 451, 1108])\n", + 
"deriv_tensor (40, 81), indices tensor([2399, 712, 271, ..., 1376, 451, 1108])\n", + "deriv_tensor (40, 81), indices tensor([2399, 712, 271, ..., 1376, 451, 1108])\n", + "deriv_tensor (40, 81), indices tensor([1921, 1311, 1165, ..., 2870, 1782, 2790])\n", + "deriv_tensor (40, 81), indices tensor([1921, 1311, 1165, ..., 2870, 1782, 2790])\n", + "deriv_tensor (40, 81), indices tensor([1921, 1311, 1165, ..., 2870, 1782, 2790])\n", + "deriv_tensor (40, 81), indices tensor([1921, 1311, 1165, ..., 2870, 1782, 2790])\n", + "deriv_tensor (40, 81), indices tensor([2546, 1213, 1940, ..., 851, 1319, 2592])\n", + "deriv_tensor (40, 81), indices tensor([2546, 1213, 1940, ..., 851, 1319, 2592])\n", + "deriv_tensor (40, 81), indices tensor([2546, 1213, 1940, ..., 851, 1319, 2592])\n", + "deriv_tensor (40, 81), indices tensor([2546, 1213, 1940, ..., 851, 1319, 2592])\n", + "deriv_tensor (40, 81), indices tensor([1568, 267, 557, ..., 977, 1952, 1726])\n", + "deriv_tensor (40, 81), indices tensor([1568, 267, 557, ..., 977, 1952, 1726])\n", + "deriv_tensor (40, 81), indices tensor([1568, 267, 557, ..., 977, 1952, 1726])\n", + "deriv_tensor (40, 81), indices tensor([1568, 267, 557, ..., 977, 1952, 1726])\n", + "deriv_tensor (40, 81), indices tensor([2535, 1315, 538, ..., 804, 2096, 63])\n", + "deriv_tensor (40, 81), indices tensor([2535, 1315, 538, ..., 804, 2096, 63])\n", + "deriv_tensor (40, 81), indices tensor([2535, 1315, 538, ..., 804, 2096, 63])\n", + "deriv_tensor (40, 81), indices tensor([2535, 1315, 538, ..., 804, 2096, 63])\n", + "deriv_tensor (40, 81), indices tensor([ 539, 444, 2262, ..., 245, 1879, 61])\n", + "deriv_tensor (40, 81), indices tensor([ 539, 444, 2262, ..., 245, 1879, 61])\n", + "deriv_tensor (40, 81), indices tensor([ 539, 444, 2262, ..., 245, 1879, 61])\n", + "deriv_tensor (40, 81), indices tensor([ 539, 444, 2262, ..., 245, 1879, 61])\n", + "deriv_tensor (40, 81), indices tensor([2849, 257, 2295, ..., 1353, 1862, 1660])\n", + "deriv_tensor (40, 81), indices tensor([2849, 257, 2295, ..., 1353, 1862, 1660])\n", + "deriv_tensor (40, 81), indices tensor([2849, 257, 2295, ..., 1353, 1862, 1660])\n", + "deriv_tensor (40, 81), indices tensor([2849, 257, 2295, ..., 1353, 1862, 1660])\n", + "deriv_tensor (40, 81), indices tensor([2561, 1729, 143, ..., 1256, 733, 3098])\n", + "deriv_tensor (40, 81), indices tensor([2561, 1729, 143, ..., 1256, 733, 3098])\n", + "deriv_tensor (40, 81), indices tensor([2561, 1729, 143, ..., 1256, 733, 3098])\n", + "deriv_tensor (40, 81), indices tensor([2561, 1729, 143, ..., 1256, 733, 3098])\n", + "deriv_tensor (40, 81), indices tensor([ 800, 2201, 344, ..., 1057, 1631, 2811])\n", + "deriv_tensor (40, 81), indices tensor([ 800, 2201, 344, ..., 1057, 1631, 2811])\n", + "deriv_tensor (40, 81), indices tensor([ 800, 2201, 344, ..., 1057, 1631, 2811])\n", + "deriv_tensor (40, 81), indices tensor([ 800, 2201, 344, ..., 1057, 1631, 2811])\n", + "deriv_tensor (40, 81), indices tensor([ 288, 1916, 2503, ..., 597, 3075, 575])\n", + "deriv_tensor (40, 81), indices tensor([ 288, 1916, 2503, ..., 597, 3075, 575])\n", + "deriv_tensor (40, 81), indices tensor([ 288, 1916, 2503, ..., 597, 3075, 575])\n", + "deriv_tensor (40, 81), indices tensor([ 288, 1916, 2503, ..., 597, 3075, 575])\n", + "deriv_tensor (40, 81), indices tensor([1289, 1073, 2718, ..., 1341, 1989, 2565])\n", + "deriv_tensor (40, 81), indices tensor([1289, 1073, 2718, ..., 1341, 1989, 2565])\n", + "deriv_tensor (40, 81), indices tensor([1289, 1073, 2718, ..., 1341, 1989, 2565])\n", + "deriv_tensor (40, 81), 
indices tensor([1289, 1073, 2718, ..., 1341, 1989, 2565])\n", + "deriv_tensor (40, 81), indices tensor([2536, 957, 1547, ..., 844, 2425, 1324])\n", + "deriv_tensor (40, 81), indices tensor([2536, 957, 1547, ..., 844, 2425, 1324])\n", + "deriv_tensor (40, 81), indices tensor([2536, 957, 1547, ..., 844, 2425, 1324])\n", + "deriv_tensor (40, 81), indices tensor([2536, 957, 1547, ..., 844, 2425, 1324])\n", + "deriv_tensor (40, 81), indices tensor([1749, 1785, 989, ..., 632, 2900, 2204])\n", + "deriv_tensor (40, 81), indices tensor([1749, 1785, 989, ..., 632, 2900, 2204])\n", + "deriv_tensor (40, 81), indices tensor([1749, 1785, 989, ..., 632, 2900, 2204])\n", + "deriv_tensor (40, 81), indices tensor([1749, 1785, 989, ..., 632, 2900, 2204])\n", + "deriv_tensor (40, 81), indices tensor([1180, 3161, 1068, ..., 1526, 262, 451])\n", + "deriv_tensor (40, 81), indices tensor([1180, 3161, 1068, ..., 1526, 262, 451])\n", + "deriv_tensor (40, 81), indices tensor([1180, 3161, 1068, ..., 1526, 262, 451])\n", + "deriv_tensor (40, 81), indices tensor([1180, 3161, 1068, ..., 1526, 262, 451])\n", + "deriv_tensor (40, 81), indices tensor([ 194, 2956, 2165, ..., 1792, 1539, 1])\n", + "deriv_tensor (40, 81), indices tensor([ 194, 2956, 2165, ..., 1792, 1539, 1])\n", + "deriv_tensor (40, 81), indices tensor([ 194, 2956, 2165, ..., 1792, 1539, 1])\n", + "deriv_tensor (40, 81), indices tensor([ 194, 2956, 2165, ..., 1792, 1539, 1])\n", + "deriv_tensor (40, 81), indices tensor([2505, 3050, 774, ..., 2568, 2220, 3046])\n", + "deriv_tensor (40, 81), indices tensor([2505, 3050, 774, ..., 2568, 2220, 3046])\n", + "deriv_tensor (40, 81), indices tensor([2505, 3050, 774, ..., 2568, 2220, 3046])\n", + "deriv_tensor (40, 81), indices tensor([2505, 3050, 774, ..., 2568, 2220, 3046])\n", + "deriv_tensor (40, 81), indices tensor([2609, 2292, 2404, ..., 1325, 2369, 314])\n", + "deriv_tensor (40, 81), indices tensor([2609, 2292, 2404, ..., 1325, 2369, 314])\n", + "deriv_tensor (40, 81), indices tensor([2609, 2292, 2404, ..., 1325, 2369, 314])\n", + "deriv_tensor (40, 81), indices tensor([2609, 2292, 2404, ..., 1325, 2369, 314])\n", + "deriv_tensor (40, 81), indices tensor([ 153, 575, 309, ..., 2217, 2793, 1357])\n", + "deriv_tensor (40, 81), indices tensor([ 153, 575, 309, ..., 2217, 2793, 1357])\n", + "deriv_tensor (40, 81), indices tensor([ 153, 575, 309, ..., 2217, 2793, 1357])\n", + "deriv_tensor (40, 81), indices tensor([ 153, 575, 309, ..., 2217, 2793, 1357])\n", + "deriv_tensor (40, 81), indices tensor([2902, 2401, 2186, ..., 2890, 925, 1093])\n", + "deriv_tensor (40, 81), indices tensor([2902, 2401, 2186, ..., 2890, 925, 1093])\n", + "deriv_tensor (40, 81), indices tensor([2902, 2401, 2186, ..., 2890, 925, 1093])\n", + "deriv_tensor (40, 81), indices tensor([2902, 2401, 2186, ..., 2890, 925, 1093])\n", + "deriv_tensor (40, 81), indices tensor([ 585, 3003, 266, ..., 834, 1185, 627])\n", + "deriv_tensor (40, 81), indices tensor([ 585, 3003, 266, ..., 834, 1185, 627])\n", + "deriv_tensor (40, 81), indices tensor([ 585, 3003, 266, ..., 834, 1185, 627])\n", + "deriv_tensor (40, 81), indices tensor([ 585, 3003, 266, ..., 834, 1185, 627])\n", + "deriv_tensor (40, 81), indices tensor([2558, 42, 3123, ..., 35, 495, 2377])\n", + "deriv_tensor (40, 81), indices tensor([2558, 42, 3123, ..., 35, 495, 2377])\n", + "deriv_tensor (40, 81), indices tensor([2558, 42, 3123, ..., 35, 495, 2377])\n", + "deriv_tensor (40, 81), indices tensor([2558, 42, 3123, ..., 35, 495, 2377])\n", + "deriv_tensor (40, 81), indices tensor([2719, 2724, 844, 
..., 2140, 3168, 1183])\n", + "deriv_tensor (40, 81), indices tensor([2719, 2724, 844, ..., 2140, 3168, 1183])\n", + "deriv_tensor (40, 81), indices tensor([2719, 2724, 844, ..., 2140, 3168, 1183])\n", + "deriv_tensor (40, 81), indices tensor([2719, 2724, 844, ..., 2140, 3168, 1183])\n", + "deriv_tensor (40, 81), indices tensor([1498, 3179, 1136, ..., 2743, 758, 2561])\n", + "deriv_tensor (40, 81), indices tensor([1498, 3179, 1136, ..., 2743, 758, 2561])\n", + "deriv_tensor (40, 81), indices tensor([1498, 3179, 1136, ..., 2743, 758, 2561])\n", + "deriv_tensor (40, 81), indices tensor([1498, 3179, 1136, ..., 2743, 758, 2561])\n", + "deriv_tensor (40, 81), indices tensor([1818, 1349, 298, ..., 589, 2582, 724])\n", + "deriv_tensor (40, 81), indices tensor([1818, 1349, 298, ..., 589, 2582, 724])\n", + "deriv_tensor (40, 81), indices tensor([1818, 1349, 298, ..., 589, 2582, 724])\n", + "deriv_tensor (40, 81), indices tensor([1818, 1349, 298, ..., 589, 2582, 724])\n", + "deriv_tensor (40, 81), indices tensor([1560, 533, 58, ..., 2856, 848, 2411])\n", + "deriv_tensor (40, 81), indices tensor([1560, 533, 58, ..., 2856, 848, 2411])\n", + "deriv_tensor (40, 81), indices tensor([1560, 533, 58, ..., 2856, 848, 2411])\n", + "deriv_tensor (40, 81), indices tensor([1560, 533, 58, ..., 2856, 848, 2411])\n", + "deriv_tensor (40, 81), indices tensor([2961, 904, 1778, ..., 2159, 1567, 1287])\n", + "deriv_tensor (40, 81), indices tensor([2961, 904, 1778, ..., 2159, 1567, 1287])\n", + "deriv_tensor (40, 81), indices tensor([2961, 904, 1778, ..., 2159, 1567, 1287])\n", + "deriv_tensor (40, 81), indices tensor([2961, 904, 1778, ..., 2159, 1567, 1287])\n", + "deriv_tensor (40, 81), indices tensor([ 939, 1966, 449, ..., 95, 1043, 1018])\n", + "deriv_tensor (40, 81), indices tensor([ 939, 1966, 449, ..., 95, 1043, 1018])\n", + "deriv_tensor (40, 81), indices tensor([ 939, 1966, 449, ..., 95, 1043, 1018])\n", + "deriv_tensor (40, 81), indices tensor([ 939, 1966, 449, ..., 95, 1043, 1018])\n", + "deriv_tensor (40, 81), indices tensor([ 594, 2914, 2532, ..., 42, 393, 2758])\n", + "deriv_tensor (40, 81), indices tensor([ 594, 2914, 2532, ..., 42, 393, 2758])\n", + "deriv_tensor (40, 81), indices tensor([ 594, 2914, 2532, ..., 42, 393, 2758])\n", + "deriv_tensor (40, 81), indices tensor([ 594, 2914, 2532, ..., 42, 393, 2758])\n", + "deriv_tensor (40, 81), indices tensor([2859, 943, 2163, ..., 328, 253, 1541])\n", + "deriv_tensor (40, 81), indices tensor([2859, 943, 2163, ..., 328, 253, 1541])\n", + "deriv_tensor (40, 81), indices tensor([2859, 943, 2163, ..., 328, 253, 1541])\n", + "deriv_tensor (40, 81), indices tensor([2859, 943, 2163, ..., 328, 253, 1541])\n", + "deriv_tensor (40, 81), indices tensor([2978, 2431, 25, ..., 2465, 2336, 441])\n", + "deriv_tensor (40, 81), indices tensor([2978, 2431, 25, ..., 2465, 2336, 441])\n", + "deriv_tensor (40, 81), indices tensor([2978, 2431, 25, ..., 2465, 2336, 441])\n", + "deriv_tensor (40, 81), indices tensor([2978, 2431, 25, ..., 2465, 2336, 441])\n", + "deriv_tensor (40, 81), indices tensor([1205, 165, 179, ..., 3114, 3233, 1298])\n", + "deriv_tensor (40, 81), indices tensor([1205, 165, 179, ..., 3114, 3233, 1298])\n", + "deriv_tensor (40, 81), indices tensor([1205, 165, 179, ..., 3114, 3233, 1298])\n", + "deriv_tensor (40, 81), indices tensor([1205, 165, 179, ..., 3114, 3233, 1298])\n", + "deriv_tensor (40, 81), indices tensor([1491, 2106, 787, ..., 1839, 1647, 2808])\n", + "deriv_tensor (40, 81), indices tensor([1491, 2106, 787, ..., 1839, 1647, 2808])\n", + "deriv_tensor 
(40, 81), indices tensor([1491, 2106, 787, ..., 1839, 1647, 2808])\n", + "deriv_tensor (40, 81), indices tensor([1491, 2106, 787, ..., 1839, 1647, 2808])\n", + "deriv_tensor (40, 81), indices tensor([1670, 2125, 567, ..., 825, 2178, 2006])\n", + "deriv_tensor (40, 81), indices tensor([1670, 2125, 567, ..., 825, 2178, 2006])\n", + "deriv_tensor (40, 81), indices tensor([1670, 2125, 567, ..., 825, 2178, 2006])\n", + "deriv_tensor (40, 81), indices tensor([1670, 2125, 567, ..., 825, 2178, 2006])\n", + "deriv_tensor (40, 81), indices tensor([2472, 1767, 2954, ..., 1961, 2893, 984])\n", + "deriv_tensor (40, 81), indices tensor([2472, 1767, 2954, ..., 1961, 2893, 984])\n", + "deriv_tensor (40, 81), indices tensor([2472, 1767, 2954, ..., 1961, 2893, 984])\n", + "deriv_tensor (40, 81), indices tensor([2472, 1767, 2954, ..., 1961, 2893, 984])\n", + "deriv_tensor (40, 81), indices tensor([2419, 2798, 2778, ..., 949, 2535, 1370])\n", + "deriv_tensor (40, 81), indices tensor([2419, 2798, 2778, ..., 949, 2535, 1370])\n", + "deriv_tensor (40, 81), indices tensor([2419, 2798, 2778, ..., 949, 2535, 1370])\n", + "deriv_tensor (40, 81), indices tensor([2419, 2798, 2778, ..., 949, 2535, 1370])\n", + "deriv_tensor (40, 81), indices tensor([2469, 670, 1641, ..., 2294, 451, 2553])\n", + "deriv_tensor (40, 81), indices tensor([2469, 670, 1641, ..., 2294, 451, 2553])\n", + "deriv_tensor (40, 81), indices tensor([2469, 670, 1641, ..., 2294, 451, 2553])\n", + "deriv_tensor (40, 81), indices tensor([2469, 670, 1641, ..., 2294, 451, 2553])\n", + "deriv_tensor (40, 81), indices tensor([1170, 130, 1703, ..., 1758, 2267, 1946])\n", + "deriv_tensor (40, 81), indices tensor([1170, 130, 1703, ..., 1758, 2267, 1946])\n", + "deriv_tensor (40, 81), indices tensor([1170, 130, 1703, ..., 1758, 2267, 1946])\n", + "deriv_tensor (40, 81), indices tensor([1170, 130, 1703, ..., 1758, 2267, 1946])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 260, 2591, ..., 3211, 2062, 2164])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 260, 2591, ..., 3211, 2062, 2164])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 260, 2591, ..., 3211, 2062, 2164])\n", + "deriv_tensor (40, 81), indices tensor([ 527, 260, 2591, ..., 3211, 2062, 2164])\n", + "deriv_tensor (40, 81), indices tensor([2144, 1957, 2646, ..., 2570, 1886, 962])\n", + "deriv_tensor (40, 81), indices tensor([2144, 1957, 2646, ..., 2570, 1886, 962])\n", + "deriv_tensor (40, 81), indices tensor([2144, 1957, 2646, ..., 2570, 1886, 962])\n", + "deriv_tensor (40, 81), indices tensor([2144, 1957, 2646, ..., 2570, 1886, 962])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 114, 2733, ..., 3020, 656, 231])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 114, 2733, ..., 3020, 656, 231])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 114, 2733, ..., 3020, 656, 231])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 114, 2733, ..., 3020, 656, 231])\n", + "deriv_tensor (40, 81), indices tensor([ 941, 2737, 2816, ..., 446, 628, 1339])\n", + "deriv_tensor (40, 81), indices tensor([ 941, 2737, 2816, ..., 446, 628, 1339])\n", + "deriv_tensor (40, 81), indices tensor([ 941, 2737, 2816, ..., 446, 628, 1339])\n", + "deriv_tensor (40, 81), indices tensor([ 941, 2737, 2816, ..., 446, 628, 1339])\n", + "deriv_tensor (40, 81), indices tensor([3234, 454, 744, ..., 746, 3162, 2707])\n", + "deriv_tensor (40, 81), indices tensor([3234, 454, 744, ..., 746, 3162, 2707])\n", + "deriv_tensor (40, 81), indices tensor([3234, 454, 744, ..., 746, 3162, 2707])\n", + "deriv_tensor (40, 81), 
indices tensor([3234, 454, 744, ..., 746, 3162, 2707])\n", + "deriv_tensor (40, 81), indices tensor([1815, 160, 2033, ..., 1763, 3051, 3037])\n", + "deriv_tensor (40, 81), indices tensor([1815, 160, 2033, ..., 1763, 3051, 3037])\n", + "deriv_tensor (40, 81), indices tensor([1815, 160, 2033, ..., 1763, 3051, 3037])\n", + "deriv_tensor (40, 81), indices tensor([1815, 160, 2033, ..., 1763, 3051, 3037])\n", + "deriv_tensor (40, 81), indices tensor([3014, 740, 2882, ..., 1731, 242, 1185])\n", + "deriv_tensor (40, 81), indices tensor([3014, 740, 2882, ..., 1731, 242, 1185])\n", + "deriv_tensor (40, 81), indices tensor([3014, 740, 2882, ..., 1731, 242, 1185])\n", + "deriv_tensor (40, 81), indices tensor([3014, 740, 2882, ..., 1731, 242, 1185])\n", + "deriv_tensor (40, 81), indices tensor([2609, 1782, 1330, ..., 2892, 1785, 2194])\n", + "deriv_tensor (40, 81), indices tensor([2609, 1782, 1330, ..., 2892, 1785, 2194])\n", + "deriv_tensor (40, 81), indices tensor([2609, 1782, 1330, ..., 2892, 1785, 2194])\n", + "deriv_tensor (40, 81), indices tensor([2609, 1782, 1330, ..., 2892, 1785, 2194])\n", + "deriv_tensor (40, 81), indices tensor([3030, 2289, 2084, ..., 305, 861, 3070])\n", + "deriv_tensor (40, 81), indices tensor([3030, 2289, 2084, ..., 305, 861, 3070])\n", + "deriv_tensor (40, 81), indices tensor([3030, 2289, 2084, ..., 305, 861, 3070])\n", + "deriv_tensor (40, 81), indices tensor([3030, 2289, 2084, ..., 305, 861, 3070])\n", + "deriv_tensor (40, 81), indices tensor([ 519, 2623, 2432, ..., 2598, 182, 1209])\n", + "deriv_tensor (40, 81), indices tensor([ 519, 2623, 2432, ..., 2598, 182, 1209])\n", + "deriv_tensor (40, 81), indices tensor([ 519, 2623, 2432, ..., 2598, 182, 1209])\n", + "deriv_tensor (40, 81), indices tensor([ 519, 2623, 2432, ..., 2598, 182, 1209])\n", + "deriv_tensor (40, 81), indices tensor([ 177, 611, 2552, ..., 1395, 2983, 1861])\n", + "deriv_tensor (40, 81), indices tensor([ 177, 611, 2552, ..., 1395, 2983, 1861])\n", + "deriv_tensor (40, 81), indices tensor([ 177, 611, 2552, ..., 1395, 2983, 1861])\n", + "deriv_tensor (40, 81), indices tensor([ 177, 611, 2552, ..., 1395, 2983, 1861])\n", + "deriv_tensor (40, 81), indices tensor([2758, 1055, 550, ..., 3154, 879, 1730])\n", + "deriv_tensor (40, 81), indices tensor([2758, 1055, 550, ..., 3154, 879, 1730])\n", + "deriv_tensor (40, 81), indices tensor([2758, 1055, 550, ..., 3154, 879, 1730])\n", + "deriv_tensor (40, 81), indices tensor([2758, 1055, 550, ..., 3154, 879, 1730])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 114, 2630, ..., 1962, 2730, 2393])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 114, 2630, ..., 1962, 2730, 2393])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 114, 2630, ..., 1962, 2730, 2393])\n", + "deriv_tensor (40, 81), indices tensor([ 656, 114, 2630, ..., 1962, 2730, 2393])\n", + "deriv_tensor (40, 81), indices tensor([ 348, 199, 50, ..., 400, 1073, 2936])\n", + "deriv_tensor (40, 81), indices tensor([ 348, 199, 50, ..., 400, 1073, 2936])\n", + "deriv_tensor (40, 81), indices tensor([ 348, 199, 50, ..., 400, 1073, 2936])\n", + "deriv_tensor (40, 81), indices tensor([ 348, 199, 50, ..., 400, 1073, 2936])\n", + "deriv_tensor (40, 81), indices tensor([1945, 271, 2740, ..., 2704, 501, 592])\n", + "deriv_tensor (40, 81), indices tensor([1945, 271, 2740, ..., 2704, 501, 592])\n", + "deriv_tensor (40, 81), indices tensor([1945, 271, 2740, ..., 2704, 501, 592])\n", + "deriv_tensor (40, 81), indices tensor([1945, 271, 2740, ..., 2704, 501, 592])\n", + "deriv_tensor (40, 81), indices 
tensor([1582, 2856, 1370, ..., 2055, 785, 669])\n", + "deriv_tensor (40, 81), indices tensor([1582, 2856, 1370, ..., 2055, 785, 669])\n", + "deriv_tensor (40, 81), indices tensor([1582, 2856, 1370, ..., 2055, 785, 669])\n", + "deriv_tensor (40, 81), indices tensor([1582, 2856, 1370, ..., 2055, 785, 669])\n", + "deriv_tensor (40, 81), indices tensor([ 224, 427, 2817, ..., 1273, 2207, 16])\n", + "deriv_tensor (40, 81), indices tensor([ 224, 427, 2817, ..., 1273, 2207, 16])\n", + "deriv_tensor (40, 81), indices tensor([ 224, 427, 2817, ..., 1273, 2207, 16])\n", + "deriv_tensor (40, 81), indices tensor([ 224, 427, 2817, ..., 1273, 2207, 16])\n", + "deriv_tensor (40, 81), indices tensor([2671, 451, 677, ..., 2466, 2133, 2398])\n", + "deriv_tensor (40, 81), indices tensor([2671, 451, 677, ..., 2466, 2133, 2398])\n", + "deriv_tensor (40, 81), indices tensor([2671, 451, 677, ..., 2466, 2133, 2398])\n", + "deriv_tensor (40, 81), indices tensor([2671, 451, 677, ..., 2466, 2133, 2398])\n", + "deriv_tensor (40, 81), indices tensor([ 472, 1940, 906, ..., 2569, 1033, 2503])\n", + "deriv_tensor (40, 81), indices tensor([ 472, 1940, 906, ..., 2569, 1033, 2503])\n", + "deriv_tensor (40, 81), indices tensor([ 472, 1940, 906, ..., 2569, 1033, 2503])\n", + "deriv_tensor (40, 81), indices tensor([ 472, 1940, 906, ..., 2569, 1033, 2503])\n", + "deriv_tensor (40, 81), indices tensor([ 7, 3077, 1342, ..., 1813, 3045, 1365])\n", + "deriv_tensor (40, 81), indices tensor([ 7, 3077, 1342, ..., 1813, 3045, 1365])\n", + "deriv_tensor (40, 81), indices tensor([ 7, 3077, 1342, ..., 1813, 3045, 1365])\n", + "deriv_tensor (40, 81), indices tensor([ 7, 3077, 1342, ..., 1813, 3045, 1365])\n", + "deriv_tensor (40, 81), indices tensor([ 766, 2543, 317, ..., 2988, 2660, 3221])\n", + "deriv_tensor (40, 81), indices tensor([ 766, 2543, 317, ..., 2988, 2660, 3221])\n", + "deriv_tensor (40, 81), indices tensor([ 766, 2543, 317, ..., 2988, 2660, 3221])\n", + "deriv_tensor (40, 81), indices tensor([ 766, 2543, 317, ..., 2988, 2660, 3221])\n", + "deriv_tensor (40, 81), indices tensor([ 965, 3121, 2530, ..., 2392, 316, 22])\n", + "deriv_tensor (40, 81), indices tensor([ 965, 3121, 2530, ..., 2392, 316, 22])\n", + "deriv_tensor (40, 81), indices tensor([ 965, 3121, 2530, ..., 2392, 316, 22])\n", + "deriv_tensor (40, 81), indices tensor([ 965, 3121, 2530, ..., 2392, 316, 22])\n", + "deriv_tensor (40, 81), indices tensor([1850, 150, 3225, ..., 1605, 1189, 2437])\n", + "deriv_tensor (40, 81), indices tensor([1850, 150, 3225, ..., 1605, 1189, 2437])\n", + "deriv_tensor (40, 81), indices tensor([1850, 150, 3225, ..., 1605, 1189, 2437])\n", + "deriv_tensor (40, 81), indices tensor([1850, 150, 3225, ..., 1605, 1189, 2437])\n", + "deriv_tensor (40, 81), indices tensor([2813, 2946, 1006, ..., 1716, 1727, 918])\n", + "deriv_tensor (40, 81), indices tensor([2813, 2946, 1006, ..., 1716, 1727, 918])\n", + "deriv_tensor (40, 81), indices tensor([2813, 2946, 1006, ..., 1716, 1727, 918])\n", + "deriv_tensor (40, 81), indices tensor([2813, 2946, 1006, ..., 1716, 1727, 918])\n", + "deriv_tensor (40, 81), indices tensor([ 654, 203, 726, ..., 684, 2093, 1154])\n", + "deriv_tensor (40, 81), indices tensor([ 654, 203, 726, ..., 684, 2093, 1154])\n", + "deriv_tensor (40, 81), indices tensor([ 654, 203, 726, ..., 684, 2093, 1154])\n", + "deriv_tensor (40, 81), indices tensor([ 654, 203, 726, ..., 684, 2093, 1154])\n", + "deriv_tensor (40, 81), indices tensor([1254, 2841, 2155, ..., 2593, 875, 1226])\n", + "deriv_tensor (40, 81), indices tensor([1254, 2841, 
2155, ..., 2593, 875, 1226])\n", + "deriv_tensor (40, 81), indices tensor([1254, 2841, 2155, ..., 2593, 875, 1226])\n", + "deriv_tensor (40, 81), indices tensor([1254, 2841, 2155, ..., 2593, 875, 1226])\n", + "deriv_tensor (40, 81), indices tensor([ 373, 13, 129, ..., 569, 1253, 1202])\n", + "deriv_tensor (40, 81), indices tensor([ 373, 13, 129, ..., 569, 1253, 1202])\n", + "deriv_tensor (40, 81), indices tensor([ 373, 13, 129, ..., 569, 1253, 1202])\n", + "deriv_tensor (40, 81), indices tensor([ 373, 13, 129, ..., 569, 1253, 1202])\n", + "deriv_tensor (40, 81), indices tensor([1569, 463, 2247, ..., 3059, 2171, 560])\n", + "deriv_tensor (40, 81), indices tensor([1569, 463, 2247, ..., 3059, 2171, 560])\n", + "deriv_tensor (40, 81), indices tensor([1569, 463, 2247, ..., 3059, 2171, 560])\n", + "deriv_tensor (40, 81), indices tensor([1569, 463, 2247, ..., 3059, 2171, 560])\n", + "deriv_tensor (40, 81), indices tensor([ 773, 240, 3148, ..., 64, 3195, 904])\n", + "deriv_tensor (40, 81), indices tensor([ 773, 240, 3148, ..., 64, 3195, 904])\n", + "deriv_tensor (40, 81), indices tensor([ 773, 240, 3148, ..., 64, 3195, 904])\n", + "deriv_tensor (40, 81), indices tensor([ 773, 240, 3148, ..., 64, 3195, 904])\n", + "deriv_tensor (40, 81), indices tensor([ 104, 3176, 89, ..., 1089, 2236, 1940])\n", + "deriv_tensor (40, 81), indices tensor([ 104, 3176, 89, ..., 1089, 2236, 1940])\n", + "deriv_tensor (40, 81), indices tensor([ 104, 3176, 89, ..., 1089, 2236, 1940])\n", + "deriv_tensor (40, 81), indices tensor([ 104, 3176, 89, ..., 1089, 2236, 1940])\n", + "deriv_tensor (40, 81), indices tensor([1105, 961, 1515, ..., 2465, 2898, 501])\n", + "deriv_tensor (40, 81), indices tensor([1105, 961, 1515, ..., 2465, 2898, 501])\n", + "deriv_tensor (40, 81), indices tensor([1105, 961, 1515, ..., 2465, 2898, 501])\n", + "deriv_tensor (40, 81), indices tensor([1105, 961, 1515, ..., 2465, 2898, 501])\n", + "deriv_tensor (40, 81), indices tensor([1151, 1948, 1910, ..., 1229, 2430, 2081])\n", + "deriv_tensor (40, 81), indices tensor([1151, 1948, 1910, ..., 1229, 2430, 2081])\n", + "deriv_tensor (40, 81), indices tensor([1151, 1948, 1910, ..., 1229, 2430, 2081])\n", + "deriv_tensor (40, 81), indices tensor([1151, 1948, 1910, ..., 1229, 2430, 2081])\n", + "deriv_tensor (40, 81), indices tensor([ 40, 692, 1994, ..., 774, 3095, 292])\n", + "deriv_tensor (40, 81), indices tensor([ 40, 692, 1994, ..., 774, 3095, 292])\n", + "deriv_tensor (40, 81), indices tensor([ 40, 692, 1994, ..., 774, 3095, 292])\n", + "deriv_tensor (40, 81), indices tensor([ 40, 692, 1994, ..., 774, 3095, 292])\n", + "deriv_tensor (40, 81), indices tensor([1534, 2839, 709, ..., 2744, 741, 583])\n", + "deriv_tensor (40, 81), indices tensor([1534, 2839, 709, ..., 2744, 741, 583])\n", + "deriv_tensor (40, 81), indices tensor([1534, 2839, 709, ..., 2744, 741, 583])\n", + "deriv_tensor (40, 81), indices tensor([1534, 2839, 709, ..., 2744, 741, 583])\n", + "deriv_tensor (40, 81), indices tensor([ 158, 1463, 1519, ..., 115, 1441, 985])\n", + "deriv_tensor (40, 81), indices tensor([ 158, 1463, 1519, ..., 115, 1441, 985])\n", + "deriv_tensor (40, 81), indices tensor([ 158, 1463, 1519, ..., 115, 1441, 985])\n", + "deriv_tensor (40, 81), indices tensor([ 158, 1463, 1519, ..., 115, 1441, 985])\n", + "deriv_tensor (40, 81), indices tensor([2243, 167, 2977, ..., 326, 2189, 2775])\n", + "deriv_tensor (40, 81), indices tensor([2243, 167, 2977, ..., 326, 2189, 2775])\n", + "deriv_tensor (40, 81), indices tensor([2243, 167, 2977, ..., 326, 2189, 2775])\n", + 
"deriv_tensor (40, 81), indices tensor([2243, 167, 2977, ..., 326, 2189, 2775])\n", + "deriv_tensor (40, 81), indices tensor([ 779, 1519, 2273, ..., 2855, 478, 324])\n", + "deriv_tensor (40, 81), indices tensor([ 779, 1519, 2273, ..., 2855, 478, 324])\n", + "deriv_tensor (40, 81), indices tensor([ 779, 1519, 2273, ..., 2855, 478, 324])\n", + "deriv_tensor (40, 81), indices tensor([ 779, 1519, 2273, ..., 2855, 478, 324])\n", + "deriv_tensor (40, 81), indices tensor([1748, 191, 216, ..., 261, 15, 1072])\n", + "deriv_tensor (40, 81), indices tensor([1748, 191, 216, ..., 261, 15, 1072])\n", + "deriv_tensor (40, 81), indices tensor([1748, 191, 216, ..., 261, 15, 1072])\n", + "deriv_tensor (40, 81), indices tensor([1748, 191, 216, ..., 261, 15, 1072])\n", + "deriv_tensor (40, 81), indices tensor([ 967, 99, 208, ..., 1643, 2049, 1278])\n", + "deriv_tensor (40, 81), indices tensor([ 967, 99, 208, ..., 1643, 2049, 1278])\n", + "deriv_tensor (40, 81), indices tensor([ 967, 99, 208, ..., 1643, 2049, 1278])\n", + "deriv_tensor (40, 81), indices tensor([ 967, 99, 208, ..., 1643, 2049, 1278])\n", + "deriv_tensor (40, 81), indices tensor([3058, 581, 2468, ..., 1600, 2030, 1246])\n", + "deriv_tensor (40, 81), indices tensor([3058, 581, 2468, ..., 1600, 2030, 1246])\n", + "deriv_tensor (40, 81), indices tensor([3058, 581, 2468, ..., 1600, 2030, 1246])\n", + "deriv_tensor (40, 81), indices tensor([3058, 581, 2468, ..., 1600, 2030, 1246])\n", + "deriv_tensor (40, 81), indices tensor([2616, 1744, 1694, ..., 668, 721, 1272])\n", + "deriv_tensor (40, 81), indices tensor([2616, 1744, 1694, ..., 668, 721, 1272])\n", + "deriv_tensor (40, 81), indices tensor([2616, 1744, 1694, ..., 668, 721, 1272])\n", + "deriv_tensor (40, 81), indices tensor([2616, 1744, 1694, ..., 668, 721, 1272])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 20, 1984, ..., 3101, 874, 2858])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 20, 1984, ..., 3101, 874, 2858])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 20, 1984, ..., 3101, 874, 2858])\n", + "deriv_tensor (40, 81), indices tensor([ 717, 20, 1984, ..., 3101, 874, 2858])\n", + "deriv_tensor (40, 81), indices tensor([2227, 2964, 2738, ..., 141, 2100, 1557])\n", + "deriv_tensor (40, 81), indices tensor([2227, 2964, 2738, ..., 141, 2100, 1557])\n", + "deriv_tensor (40, 81), indices tensor([2227, 2964, 2738, ..., 141, 2100, 1557])\n", + "deriv_tensor (40, 81), indices tensor([2227, 2964, 2738, ..., 141, 2100, 1557])\n", + "deriv_tensor (40, 81), indices tensor([ 112, 75, 1035, ..., 2903, 2400, 1823])\n", + "deriv_tensor (40, 81), indices tensor([ 112, 75, 1035, ..., 2903, 2400, 1823])\n", + "deriv_tensor (40, 81), indices tensor([ 112, 75, 1035, ..., 2903, 2400, 1823])\n", + "deriv_tensor (40, 81), indices tensor([ 112, 75, 1035, ..., 2903, 2400, 1823])\n", + "deriv_tensor (40, 81), indices tensor([1703, 189, 1472, ..., 907, 1255, 846])\n", + "deriv_tensor (40, 81), indices tensor([1703, 189, 1472, ..., 907, 1255, 846])\n", + "deriv_tensor (40, 81), indices tensor([1703, 189, 1472, ..., 907, 1255, 846])\n", + "deriv_tensor (40, 81), indices tensor([1703, 189, 1472, ..., 907, 1255, 846])\n", + "deriv_tensor (40, 81), indices tensor([ 234, 1214, 600, ..., 3037, 723, 145])\n", + "deriv_tensor (40, 81), indices tensor([ 234, 1214, 600, ..., 3037, 723, 145])\n", + "deriv_tensor (40, 81), indices tensor([ 234, 1214, 600, ..., 3037, 723, 145])\n", + "deriv_tensor (40, 81), indices tensor([ 234, 1214, 600, ..., 3037, 723, 145])\n", + "deriv_tensor (40, 81), indices tensor([2201, 
421, 1778, ..., 2016, 1861, 898])\n", + "deriv_tensor (40, 81), indices tensor([2201, 421, 1778, ..., 2016, 1861, 898])\n", + "deriv_tensor (40, 81), indices tensor([2201, 421, 1778, ..., 2016, 1861, 898])\n", + "deriv_tensor (40, 81), indices tensor([2201, 421, 1778, ..., 2016, 1861, 898])\n", + "deriv_tensor (40, 81), indices tensor([3052, 435, 132, ..., 2419, 463, 2716])\n", + "deriv_tensor (40, 81), indices tensor([3052, 435, 132, ..., 2419, 463, 2716])\n", + "deriv_tensor (40, 81), indices tensor([3052, 435, 132, ..., 2419, 463, 2716])\n", + "deriv_tensor (40, 81), indices tensor([3052, 435, 132, ..., 2419, 463, 2716])\n", + "deriv_tensor (40, 81), indices tensor([2054, 2296, 3176, ..., 230, 2578, 409])\n", + "deriv_tensor (40, 81), indices tensor([2054, 2296, 3176, ..., 230, 2578, 409])\n", + "deriv_tensor (40, 81), indices tensor([2054, 2296, 3176, ..., 230, 2578, 409])\n", + "deriv_tensor (40, 81), indices tensor([2054, 2296, 3176, ..., 230, 2578, 409])\n", + "deriv_tensor (40, 81), indices tensor([1705, 492, 988, ..., 1076, 2229, 1058])\n", + "deriv_tensor (40, 81), indices tensor([1705, 492, 988, ..., 1076, 2229, 1058])\n", + "deriv_tensor (40, 81), indices tensor([1705, 492, 988, ..., 1076, 2229, 1058])\n", + "deriv_tensor (40, 81), indices tensor([1705, 492, 988, ..., 1076, 2229, 1058])\n", + "deriv_tensor (40, 81), indices tensor([1236, 2176, 2758, ..., 814, 2419, 3001])\n", + "deriv_tensor (40, 81), indices tensor([1236, 2176, 2758, ..., 814, 2419, 3001])\n", + "deriv_tensor (40, 81), indices tensor([1236, 2176, 2758, ..., 814, 2419, 3001])\n", + "deriv_tensor (40, 81), indices tensor([1236, 2176, 2758, ..., 814, 2419, 3001])\n", + "deriv_tensor (40, 81), indices tensor([1677, 2934, 1497, ..., 2662, 722, 800])\n", + "deriv_tensor (40, 81), indices tensor([1677, 2934, 1497, ..., 2662, 722, 800])\n", + "deriv_tensor (40, 81), indices tensor([1677, 2934, 1497, ..., 2662, 722, 800])\n", + "deriv_tensor (40, 81), indices tensor([1677, 2934, 1497, ..., 2662, 722, 800])\n", + "deriv_tensor (40, 81), indices tensor([1629, 2347, 1696, ..., 8, 2789, 2251])\n", + "deriv_tensor (40, 81), indices tensor([1629, 2347, 1696, ..., 8, 2789, 2251])\n", + "deriv_tensor (40, 81), indices tensor([1629, 2347, 1696, ..., 8, 2789, 2251])\n", + "deriv_tensor (40, 81), indices tensor([1629, 2347, 1696, ..., 8, 2789, 2251])\n", + "deriv_tensor (40, 81), indices tensor([ 885, 849, 1709, ..., 1110, 990, 1484])\n", + "deriv_tensor (40, 81), indices tensor([ 885, 849, 1709, ..., 1110, 990, 1484])\n", + "deriv_tensor (40, 81), indices tensor([ 885, 849, 1709, ..., 1110, 990, 1484])\n", + "deriv_tensor (40, 81), indices tensor([ 885, 849, 1709, ..., 1110, 990, 1484])\n", + "deriv_tensor (40, 81), indices tensor([3000, 2564, 1955, ..., 1480, 1791, 889])\n", + "deriv_tensor (40, 81), indices tensor([3000, 2564, 1955, ..., 1480, 1791, 889])\n", + "deriv_tensor (40, 81), indices tensor([3000, 2564, 1955, ..., 1480, 1791, 889])\n", + "deriv_tensor (40, 81), indices tensor([3000, 2564, 1955, ..., 1480, 1791, 889])\n", + "deriv_tensor (40, 81), indices tensor([1842, 1562, 263, ..., 1574, 272, 2483])\n", + "deriv_tensor (40, 81), indices tensor([1842, 1562, 263, ..., 1574, 272, 2483])\n", + "deriv_tensor (40, 81), indices tensor([1842, 1562, 263, ..., 1574, 272, 2483])\n", + "deriv_tensor (40, 81), indices tensor([1842, 1562, 263, ..., 1574, 272, 2483])\n", + "deriv_tensor (40, 81), indices tensor([2059, 877, 53, ..., 35, 1381, 2570])\n", + "deriv_tensor (40, 81), indices tensor([2059, 877, 53, ..., 35, 1381, 
2570])\n", + "deriv_tensor (40, 81), indices tensor([2059, 877, 53, ..., 35, 1381, 2570])\n", + "deriv_tensor (40, 81), indices tensor([2059, 877, 53, ..., 35, 1381, 2570])\n", + "deriv_tensor (40, 81), indices tensor([ 10, 2169, 387, ..., 2960, 2367, 554])\n", + "deriv_tensor (40, 81), indices tensor([ 10, 2169, 387, ..., 2960, 2367, 554])\n", + "deriv_tensor (40, 81), indices tensor([ 10, 2169, 387, ..., 2960, 2367, 554])\n", + "deriv_tensor (40, 81), indices tensor([ 10, 2169, 387, ..., 2960, 2367, 554])\n", + "deriv_tensor (40, 81), indices tensor([1653, 389, 2721, ..., 965, 2860, 2664])\n", + "deriv_tensor (40, 81), indices tensor([1653, 389, 2721, ..., 965, 2860, 2664])\n", + "deriv_tensor (40, 81), indices tensor([1653, 389, 2721, ..., 965, 2860, 2664])\n", + "deriv_tensor (40, 81), indices tensor([1653, 389, 2721, ..., 965, 2860, 2664])\n", + "deriv_tensor (40, 81), indices tensor([2737, 3218, 432, ..., 1007, 2386, 89])\n", + "deriv_tensor (40, 81), indices tensor([2737, 3218, 432, ..., 1007, 2386, 89])\n", + "deriv_tensor (40, 81), indices tensor([2737, 3218, 432, ..., 1007, 2386, 89])\n", + "deriv_tensor (40, 81), indices tensor([2737, 3218, 432, ..., 1007, 2386, 89])\n", + "deriv_tensor (40, 81), indices tensor([1092, 2916, 2646, ..., 1072, 2574, 2596])\n", + "deriv_tensor (40, 81), indices tensor([1092, 2916, 2646, ..., 1072, 2574, 2596])\n", + "deriv_tensor (40, 81), indices tensor([1092, 2916, 2646, ..., 1072, 2574, 2596])\n", + "deriv_tensor (40, 81), indices tensor([1092, 2916, 2646, ..., 1072, 2574, 2596])\n", + "deriv_tensor (40, 81), indices tensor([ 87, 1300, 413, ..., 690, 1641, 1266])\n", + "deriv_tensor (40, 81), indices tensor([ 87, 1300, 413, ..., 690, 1641, 1266])\n", + "deriv_tensor (40, 81), indices tensor([ 87, 1300, 413, ..., 690, 1641, 1266])\n", + "deriv_tensor (40, 81), indices tensor([ 87, 1300, 413, ..., 690, 1641, 1266])\n", + "deriv_tensor (40, 81), indices tensor([2502, 2254, 685, ..., 1035, 1631, 1228])\n", + "deriv_tensor (40, 81), indices tensor([2502, 2254, 685, ..., 1035, 1631, 1228])\n", + "deriv_tensor (40, 81), indices tensor([2502, 2254, 685, ..., 1035, 1631, 1228])\n", + "deriv_tensor (40, 81), indices tensor([2502, 2254, 685, ..., 1035, 1631, 1228])\n", + "deriv_tensor (40, 81), indices tensor([2578, 2976, 2649, ..., 2086, 997, 751])\n", + "deriv_tensor (40, 81), indices tensor([2578, 2976, 2649, ..., 2086, 997, 751])\n", + "deriv_tensor (40, 81), indices tensor([2578, 2976, 2649, ..., 2086, 997, 751])\n", + "deriv_tensor (40, 81), indices tensor([2578, 2976, 2649, ..., 2086, 997, 751])\n", + "deriv_tensor (40, 81), indices tensor([2235, 2179, 2969, ..., 1616, 2268, 2229])\n", + "deriv_tensor (40, 81), indices tensor([2235, 2179, 2969, ..., 1616, 2268, 2229])\n", + "deriv_tensor (40, 81), indices tensor([2235, 2179, 2969, ..., 1616, 2268, 2229])\n", + "deriv_tensor (40, 81), indices tensor([2235, 2179, 2969, ..., 1616, 2268, 2229])\n", + "deriv_tensor (40, 81), indices tensor([ 60, 2198, 3214, ..., 1988, 2499, 2608])\n", + "deriv_tensor (40, 81), indices tensor([ 60, 2198, 3214, ..., 1988, 2499, 2608])\n", + "deriv_tensor (40, 81), indices tensor([ 60, 2198, 3214, ..., 1988, 2499, 2608])\n", + "deriv_tensor (40, 81), indices tensor([ 60, 2198, 3214, ..., 1988, 2499, 2608])\n", + "deriv_tensor (40, 81), indices tensor([ 226, 1143, 1851, ..., 381, 2163, 2668])\n", + "deriv_tensor (40, 81), indices tensor([ 226, 1143, 1851, ..., 381, 2163, 2668])\n", + "deriv_tensor (40, 81), indices tensor([ 226, 1143, 1851, ..., 381, 2163, 2668])\n", + 
"deriv_tensor (40, 81), indices tensor([ 226, 1143, 1851, ..., 381, 2163, 2668])\n", + "deriv_tensor (40, 81), indices tensor([2095, 2122, 1577, ..., 976, 1616, 1470])\n", + "deriv_tensor (40, 81), indices tensor([2095, 2122, 1577, ..., 976, 1616, 1470])\n", + "deriv_tensor (40, 81), indices tensor([2095, 2122, 1577, ..., 976, 1616, 1470])\n", + "deriv_tensor (40, 81), indices tensor([2095, 2122, 1577, ..., 976, 1616, 1470])\n", + "deriv_tensor (40, 81), indices tensor([2811, 1926, 2211, ..., 1618, 486, 2410])\n", + "deriv_tensor (40, 81), indices tensor([2811, 1926, 2211, ..., 1618, 486, 2410])\n", + "deriv_tensor (40, 81), indices tensor([2811, 1926, 2211, ..., 1618, 486, 2410])\n", + "deriv_tensor (40, 81), indices tensor([2811, 1926, 2211, ..., 1618, 486, 2410])\n", + "deriv_tensor (40, 81), indices tensor([2448, 214, 114, ..., 3014, 1252, 2162])\n", + "deriv_tensor (40, 81), indices tensor([2448, 214, 114, ..., 3014, 1252, 2162])\n", + "deriv_tensor (40, 81), indices tensor([2448, 214, 114, ..., 3014, 1252, 2162])\n", + "deriv_tensor (40, 81), indices tensor([2448, 214, 114, ..., 3014, 1252, 2162])\n", + "deriv_tensor (40, 81), indices tensor([ 850, 383, 730, ..., 2479, 2918, 3134])\n", + "deriv_tensor (40, 81), indices tensor([ 850, 383, 730, ..., 2479, 2918, 3134])\n", + "deriv_tensor (40, 81), indices tensor([ 850, 383, 730, ..., 2479, 2918, 3134])\n", + "deriv_tensor (40, 81), indices tensor([ 850, 383, 730, ..., 2479, 2918, 3134])\n", + "deriv_tensor (40, 81), indices tensor([2233, 346, 2666, ..., 2680, 1128, 1136])\n", + "deriv_tensor (40, 81), indices tensor([2233, 346, 2666, ..., 2680, 1128, 1136])\n", + "deriv_tensor (40, 81), indices tensor([2233, 346, 2666, ..., 2680, 1128, 1136])\n", + "deriv_tensor (40, 81), indices tensor([2233, 346, 2666, ..., 2680, 1128, 1136])\n", + "deriv_tensor (40, 81), indices tensor([1710, 1199, 2699, ..., 1586, 3044, 1589])\n", + "deriv_tensor (40, 81), indices tensor([1710, 1199, 2699, ..., 1586, 3044, 1589])\n", + "deriv_tensor (40, 81), indices tensor([1710, 1199, 2699, ..., 1586, 3044, 1589])\n", + "deriv_tensor (40, 81), indices tensor([1710, 1199, 2699, ..., 1586, 3044, 1589])\n", + "deriv_tensor (40, 81), indices tensor([ 141, 1038, 741, ..., 205, 1079, 72])\n", + "deriv_tensor (40, 81), indices tensor([ 141, 1038, 741, ..., 205, 1079, 72])\n", + "deriv_tensor (40, 81), indices tensor([ 141, 1038, 741, ..., 205, 1079, 72])\n", + "deriv_tensor (40, 81), indices tensor([ 141, 1038, 741, ..., 205, 1079, 72])\n", + "deriv_tensor (40, 81), indices tensor([2070, 722, 1739, ..., 3177, 1321, 2618])\n", + "deriv_tensor (40, 81), indices tensor([2070, 722, 1739, ..., 3177, 1321, 2618])\n", + "deriv_tensor (40, 81), indices tensor([2070, 722, 1739, ..., 3177, 1321, 2618])\n", + "deriv_tensor (40, 81), indices tensor([2070, 722, 1739, ..., 3177, 1321, 2618])\n", + "deriv_tensor (40, 81), indices tensor([ 414, 933, 2560, ..., 500, 2089, 239])\n", + "deriv_tensor (40, 81), indices tensor([ 414, 933, 2560, ..., 500, 2089, 239])\n", + "deriv_tensor (40, 81), indices tensor([ 414, 933, 2560, ..., 500, 2089, 239])\n", + "deriv_tensor (40, 81), indices tensor([ 414, 933, 2560, ..., 500, 2089, 239])\n", + "deriv_tensor (40, 81), indices tensor([2322, 1634, 234, ..., 1255, 1056, 3029])\n", + "deriv_tensor (40, 81), indices tensor([2322, 1634, 234, ..., 1255, 1056, 3029])\n", + "deriv_tensor (40, 81), indices tensor([2322, 1634, 234, ..., 1255, 1056, 3029])\n", + "deriv_tensor (40, 81), indices tensor([2322, 1634, 234, ..., 1255, 1056, 3029])\n", + 
"deriv_tensor (40, 81), indices tensor([ 263, 1053, 1468, ..., 1914, 252, 292])\n", + "deriv_tensor (40, 81), indices tensor([ 263, 1053, 1468, ..., 1914, 252, 292])\n", + "deriv_tensor (40, 81), indices tensor([ 263, 1053, 1468, ..., 1914, 252, 292])\n", + "deriv_tensor (40, 81), indices tensor([ 263, 1053, 1468, ..., 1914, 252, 292])\n", + "deriv_tensor (40, 81), indices tensor([2218, 173, 2025, ..., 2558, 250, 1017])\n", + "deriv_tensor (40, 81), indices tensor([2218, 173, 2025, ..., 2558, 250, 1017])\n", + "deriv_tensor (40, 81), indices tensor([2218, 173, 2025, ..., 2558, 250, 1017])\n", + "deriv_tensor (40, 81), indices tensor([2218, 173, 2025, ..., 2558, 250, 1017])\n", + "deriv_tensor (40, 81), indices tensor([1033, 1685, 2055, ..., 3143, 914, 2668])\n", + "deriv_tensor (40, 81), indices tensor([1033, 1685, 2055, ..., 3143, 914, 2668])\n", + "deriv_tensor (40, 81), indices tensor([1033, 1685, 2055, ..., 3143, 914, 2668])\n", + "deriv_tensor (40, 81), indices tensor([1033, 1685, 2055, ..., 3143, 914, 2668])\n", + "deriv_tensor (40, 81), indices tensor([1921, 2446, 2956, ..., 1868, 2364, 1772])\n", + "deriv_tensor (40, 81), indices tensor([1921, 2446, 2956, ..., 1868, 2364, 1772])\n", + "deriv_tensor (40, 81), indices tensor([1921, 2446, 2956, ..., 1868, 2364, 1772])\n", + "deriv_tensor (40, 81), indices tensor([1921, 2446, 2956, ..., 1868, 2364, 1772])\n", + "deriv_tensor (40, 81), indices tensor([1607, 56, 2327, ..., 1482, 2082, 1198])\n", + "deriv_tensor (40, 81), indices tensor([1607, 56, 2327, ..., 1482, 2082, 1198])\n", + "deriv_tensor (40, 81), indices tensor([1607, 56, 2327, ..., 1482, 2082, 1198])\n", + "deriv_tensor (40, 81), indices tensor([1607, 56, 2327, ..., 1482, 2082, 1198])\n", + "deriv_tensor (40, 81), indices tensor([1000, 675, 256, ..., 764, 2542, 1937])\n", + "deriv_tensor (40, 81), indices tensor([1000, 675, 256, ..., 764, 2542, 1937])\n", + "deriv_tensor (40, 81), indices tensor([1000, 675, 256, ..., 764, 2542, 1937])\n", + "deriv_tensor (40, 81), indices tensor([1000, 675, 256, ..., 764, 2542, 1937])\n", + "deriv_tensor (40, 81), indices tensor([3018, 1337, 2522, ..., 330, 1602, 1378])\n", + "deriv_tensor (40, 81), indices tensor([3018, 1337, 2522, ..., 330, 1602, 1378])\n", + "deriv_tensor (40, 81), indices tensor([3018, 1337, 2522, ..., 330, 1602, 1378])\n", + "deriv_tensor (40, 81), indices tensor([3018, 1337, 2522, ..., 330, 1602, 1378])\n", + "deriv_tensor (40, 81), indices tensor([ 115, 337, 2642, ..., 920, 717, 1810])\n", + "deriv_tensor (40, 81), indices tensor([ 115, 337, 2642, ..., 920, 717, 1810])\n", + "deriv_tensor (40, 81), indices tensor([ 115, 337, 2642, ..., 920, 717, 1810])\n", + "deriv_tensor (40, 81), indices tensor([ 115, 337, 2642, ..., 920, 717, 1810])\n", + "deriv_tensor (40, 81), indices tensor([1241, 2637, 1412, ..., 3111, 1529, 982])\n", + "deriv_tensor (40, 81), indices tensor([1241, 2637, 1412, ..., 3111, 1529, 982])\n", + "deriv_tensor (40, 81), indices tensor([1241, 2637, 1412, ..., 3111, 1529, 982])\n", + "deriv_tensor (40, 81), indices tensor([1241, 2637, 1412, ..., 3111, 1529, 982])\n", + "deriv_tensor (40, 81), indices tensor([3036, 2118, 911, ..., 1118, 1874, 822])\n", + "deriv_tensor (40, 81), indices tensor([3036, 2118, 911, ..., 1118, 1874, 822])\n", + "deriv_tensor (40, 81), indices tensor([3036, 2118, 911, ..., 1118, 1874, 822])\n", + "deriv_tensor (40, 81), indices tensor([3036, 2118, 911, ..., 1118, 1874, 822])\n", + "deriv_tensor (40, 81), indices tensor([ 321, 581, 970, ..., 1876, 1092, 2662])\n", + "deriv_tensor 
(40, 81), indices tensor([ 321, 581, 970, ..., 1876, 1092, 2662])\n", + "deriv_tensor (40, 81), indices tensor([ 321, 581, 970, ..., 1876, 1092, 2662])\n", + "deriv_tensor (40, 81), indices tensor([ 321, 581, 970, ..., 1876, 1092, 2662])\n", + "deriv_tensor (40, 81), indices tensor([2646, 2748, 317, ..., 1357, 1412, 2226])\n", + "deriv_tensor (40, 81), indices tensor([2646, 2748, 317, ..., 1357, 1412, 2226])\n", + "deriv_tensor (40, 81), indices tensor([2646, 2748, 317, ..., 1357, 1412, 2226])\n", + "deriv_tensor (40, 81), indices tensor([2646, 2748, 317, ..., 1357, 1412, 2226])\n", + "deriv_tensor (40, 81), indices tensor([ 502, 154, 3072, ..., 2078, 971, 2256])\n", + "deriv_tensor (40, 81), indices tensor([ 502, 154, 3072, ..., 2078, 971, 2256])\n", + "deriv_tensor (40, 81), indices tensor([ 502, 154, 3072, ..., 2078, 971, 2256])\n", + "deriv_tensor (40, 81), indices tensor([ 502, 154, 3072, ..., 2078, 971, 2256])\n", + "deriv_tensor (40, 81), indices tensor([2227, 2274, 2273, ..., 2983, 2584, 25])\n", + "deriv_tensor (40, 81), indices tensor([2227, 2274, 2273, ..., 2983, 2584, 25])\n", + "deriv_tensor (40, 81), indices tensor([2227, 2274, 2273, ..., 2983, 2584, 25])\n", + "deriv_tensor (40, 81), indices tensor([2227, 2274, 2273, ..., 2983, 2584, 25])\n", + "deriv_tensor (40, 81), indices tensor([1734, 716, 2629, ..., 3230, 2533, 1329])\n", + "deriv_tensor (40, 81), indices tensor([1734, 716, 2629, ..., 3230, 2533, 1329])\n", + "deriv_tensor (40, 81), indices tensor([1734, 716, 2629, ..., 3230, 2533, 1329])\n", + "deriv_tensor (40, 81), indices tensor([1734, 716, 2629, ..., 3230, 2533, 1329])\n", + "deriv_tensor (40, 81), indices tensor([2528, 582, 1511, ..., 1130, 588, 2758])\n", + "deriv_tensor (40, 81), indices tensor([2528, 582, 1511, ..., 1130, 588, 2758])\n", + "deriv_tensor (40, 81), indices tensor([2528, 582, 1511, ..., 1130, 588, 2758])\n", + "deriv_tensor (40, 81), indices tensor([2528, 582, 1511, ..., 1130, 588, 2758])\n", + "deriv_tensor (40, 81), indices tensor([1672, 1105, 707, ..., 161, 1148, 3009])\n", + "deriv_tensor (40, 81), indices tensor([1672, 1105, 707, ..., 161, 1148, 3009])\n", + "deriv_tensor (40, 81), indices tensor([1672, 1105, 707, ..., 161, 1148, 3009])\n", + "deriv_tensor (40, 81), indices tensor([1672, 1105, 707, ..., 161, 1148, 3009])\n", + "deriv_tensor (40, 81), indices tensor([2364, 2912, 315, ..., 937, 1749, 1667])\n", + "deriv_tensor (40, 81), indices tensor([2364, 2912, 315, ..., 937, 1749, 1667])\n", + "deriv_tensor (40, 81), indices tensor([2364, 2912, 315, ..., 937, 1749, 1667])\n", + "deriv_tensor (40, 81), indices tensor([2364, 2912, 315, ..., 937, 1749, 1667])\n", + "deriv_tensor (40, 81), indices tensor([2304, 1611, 2328, ..., 1738, 1576, 1518])\n", + "deriv_tensor (40, 81), indices tensor([2304, 1611, 2328, ..., 1738, 1576, 1518])\n", + "deriv_tensor (40, 81), indices tensor([2304, 1611, 2328, ..., 1738, 1576, 1518])\n", + "deriv_tensor (40, 81), indices tensor([2304, 1611, 2328, ..., 1738, 1576, 1518])\n", + "deriv_tensor (40, 81), indices tensor([1785, 361, 795, ..., 173, 748, 1240])\n", + "deriv_tensor (40, 81), indices tensor([1785, 361, 795, ..., 173, 748, 1240])\n", + "deriv_tensor (40, 81), indices tensor([1785, 361, 795, ..., 173, 748, 1240])\n", + "deriv_tensor (40, 81), indices tensor([1785, 361, 795, ..., 173, 748, 1240])\n", + "deriv_tensor (40, 81), indices tensor([2336, 195, 1507, ..., 2291, 127, 913])\n", + "deriv_tensor (40, 81), indices tensor([2336, 195, 1507, ..., 2291, 127, 913])\n", + "deriv_tensor (40, 81), indices 
tensor([2336, 195, 1507, ..., 2291, 127, 913])\n", + "deriv_tensor (40, 81), indices tensor([2336, 195, 1507, ..., 2291, 127, 913])\n", + "deriv_tensor (40, 81), indices tensor([2951, 162, 1078, ..., 1449, 1663, 2952])\n", + "deriv_tensor (40, 81), indices tensor([2951, 162, 1078, ..., 1449, 1663, 2952])\n", + "deriv_tensor (40, 81), indices tensor([2951, 162, 1078, ..., 1449, 1663, 2952])\n", + "deriv_tensor (40, 81), indices tensor([2951, 162, 1078, ..., 1449, 1663, 2952])\n", + "deriv_tensor (40, 81), indices tensor([1733, 72, 820, ..., 1974, 439, 870])\n", + "deriv_tensor (40, 81), indices tensor([1733, 72, 820, ..., 1974, 439, 870])\n", + "deriv_tensor (40, 81), indices tensor([1733, 72, 820, ..., 1974, 439, 870])\n", + "deriv_tensor (40, 81), indices tensor([1733, 72, 820, ..., 1974, 439, 870])\n", + "deriv_tensor (40, 81), indices tensor([1524, 962, 1502, ..., 2938, 184, 655])\n", + "deriv_tensor (40, 81), indices tensor([1524, 962, 1502, ..., 2938, 184, 655])\n", + "deriv_tensor (40, 81), indices tensor([1524, 962, 1502, ..., 2938, 184, 655])\n", + "deriv_tensor (40, 81), indices tensor([1524, 962, 1502, ..., 2938, 184, 655])\n", + "deriv_tensor (40, 81), indices tensor([1934, 5, 337, ..., 2880, 2722, 2843])\n", + "deriv_tensor (40, 81), indices tensor([1934, 5, 337, ..., 2880, 2722, 2843])\n", + "deriv_tensor (40, 81), indices tensor([1934, 5, 337, ..., 2880, 2722, 2843])\n", + "deriv_tensor (40, 81), indices tensor([1934, 5, 337, ..., 2880, 2722, 2843])\n", + "deriv_tensor (40, 81), indices tensor([3099, 772, 2763, ..., 1938, 2050, 209])\n", + "deriv_tensor (40, 81), indices tensor([3099, 772, 2763, ..., 1938, 2050, 209])\n", + "deriv_tensor (40, 81), indices tensor([3099, 772, 2763, ..., 1938, 2050, 209])\n", + "deriv_tensor (40, 81), indices tensor([3099, 772, 2763, ..., 1938, 2050, 209])\n", + "deriv_tensor (40, 81), indices tensor([1838, 2155, 2413, ..., 1252, 1127, 2365])\n", + "deriv_tensor (40, 81), indices tensor([1838, 2155, 2413, ..., 1252, 1127, 2365])\n", + "deriv_tensor (40, 81), indices tensor([1838, 2155, 2413, ..., 1252, 1127, 2365])\n", + "deriv_tensor (40, 81), indices tensor([1838, 2155, 2413, ..., 1252, 1127, 2365])\n", + "deriv_tensor (40, 81), indices tensor([ 6, 2872, 777, ..., 355, 2863, 2496])\n", + "deriv_tensor (40, 81), indices tensor([ 6, 2872, 777, ..., 355, 2863, 2496])\n", + "deriv_tensor (40, 81), indices tensor([ 6, 2872, 777, ..., 355, 2863, 2496])\n", + "deriv_tensor (40, 81), indices tensor([ 6, 2872, 777, ..., 355, 2863, 2496])\n", + "deriv_tensor (40, 81), indices tensor([2974, 2231, 2979, ..., 2316, 3100, 3057])\n", + "deriv_tensor (40, 81), indices tensor([2974, 2231, 2979, ..., 2316, 3100, 3057])\n", + "deriv_tensor (40, 81), indices tensor([2974, 2231, 2979, ..., 2316, 3100, 3057])\n", + "deriv_tensor (40, 81), indices tensor([2974, 2231, 2979, ..., 2316, 3100, 3057])\n", + "deriv_tensor (40, 81), indices tensor([3125, 1870, 522, ..., 2042, 12, 1869])\n", + "deriv_tensor (40, 81), indices tensor([3125, 1870, 522, ..., 2042, 12, 1869])\n", + "deriv_tensor (40, 81), indices tensor([3125, 1870, 522, ..., 2042, 12, 1869])\n", + "deriv_tensor (40, 81), indices tensor([3125, 1870, 522, ..., 2042, 12, 1869])\n", + "deriv_tensor (40, 81), indices tensor([1263, 2153, 1917, ..., 1170, 287, 657])\n", + "deriv_tensor (40, 81), indices tensor([1263, 2153, 1917, ..., 1170, 287, 657])\n", + "deriv_tensor (40, 81), indices tensor([1263, 2153, 1917, ..., 1170, 287, 657])\n", + "deriv_tensor (40, 81), indices tensor([1263, 2153, 1917, ..., 1170, 287, 
657])\n", + "deriv_tensor (40, 81), indices tensor([1890, 775, 2417, ..., 696, 1374, 279])\n", + "deriv_tensor (40, 81), indices tensor([1890, 775, 2417, ..., 696, 1374, 279])\n", + "deriv_tensor (40, 81), indices tensor([1890, 775, 2417, ..., 696, 1374, 279])\n", + "deriv_tensor (40, 81), indices tensor([1890, 775, 2417, ..., 696, 1374, 279])\n", + "deriv_tensor (40, 81), indices tensor([ 565, 1391, 1657, ..., 2779, 3038, 642])\n", + "deriv_tensor (40, 81), indices tensor([ 565, 1391, 1657, ..., 2779, 3038, 642])\n", + "deriv_tensor (40, 81), indices tensor([ 565, 1391, 1657, ..., 2779, 3038, 642])\n", + "deriv_tensor (40, 81), indices tensor([ 565, 1391, 1657, ..., 2779, 3038, 642])\n", + "deriv_tensor (40, 81), indices tensor([1425, 2311, 1649, ..., 2147, 2035, 2765])\n", + "deriv_tensor (40, 81), indices tensor([1425, 2311, 1649, ..., 2147, 2035, 2765])\n", + "deriv_tensor (40, 81), indices tensor([1425, 2311, 1649, ..., 2147, 2035, 2765])\n", + "deriv_tensor (40, 81), indices tensor([1425, 2311, 1649, ..., 2147, 2035, 2765])\n", + "deriv_tensor (40, 81), indices tensor([2649, 1626, 2381, ..., 2751, 2405, 2735])\n", + "deriv_tensor (40, 81), indices tensor([2649, 1626, 2381, ..., 2751, 2405, 2735])\n", + "deriv_tensor (40, 81), indices tensor([2649, 1626, 2381, ..., 2751, 2405, 2735])\n", + "deriv_tensor (40, 81), indices tensor([2649, 1626, 2381, ..., 2751, 2405, 2735])\n", + "deriv_tensor (40, 81), indices tensor([2309, 1144, 1925, ..., 567, 2679, 3045])\n", + "deriv_tensor (40, 81), indices tensor([2309, 1144, 1925, ..., 567, 2679, 3045])\n", + "deriv_tensor (40, 81), indices tensor([2309, 1144, 1925, ..., 567, 2679, 3045])\n", + "deriv_tensor (40, 81), indices tensor([2309, 1144, 1925, ..., 567, 2679, 3045])\n", + "deriv_tensor (40, 81), indices tensor([ 395, 2035, 1878, ..., 3013, 871, 415])\n", + "deriv_tensor (40, 81), indices tensor([ 395, 2035, 1878, ..., 3013, 871, 415])\n", + "deriv_tensor (40, 81), indices tensor([ 395, 2035, 1878, ..., 3013, 871, 415])\n", + "deriv_tensor (40, 81), indices tensor([ 395, 2035, 1878, ..., 3013, 871, 415])\n", + "deriv_tensor (40, 81), indices tensor([ 735, 1740, 2930, ..., 2164, 1178, 559])\n", + "deriv_tensor (40, 81), indices tensor([ 735, 1740, 2930, ..., 2164, 1178, 559])\n", + "deriv_tensor (40, 81), indices tensor([ 735, 1740, 2930, ..., 2164, 1178, 559])\n", + "deriv_tensor (40, 81), indices tensor([ 735, 1740, 2930, ..., 2164, 1178, 559])\n", + "deriv_tensor (40, 81), indices tensor([2188, 2396, 1133, ..., 1623, 64, 1091])\n", + "deriv_tensor (40, 81), indices tensor([2188, 2396, 1133, ..., 1623, 64, 1091])\n", + "deriv_tensor (40, 81), indices tensor([2188, 2396, 1133, ..., 1623, 64, 1091])\n", + "deriv_tensor (40, 81), indices tensor([2188, 2396, 1133, ..., 1623, 64, 1091])\n", + "deriv_tensor (40, 81), indices tensor([1251, 1212, 3127, ..., 2878, 630, 387])\n", + "deriv_tensor (40, 81), indices tensor([1251, 1212, 3127, ..., 2878, 630, 387])\n", + "deriv_tensor (40, 81), indices tensor([1251, 1212, 3127, ..., 2878, 630, 387])\n", + "deriv_tensor (40, 81), indices tensor([1251, 1212, 3127, ..., 2878, 630, 387])\n", + "deriv_tensor (40, 81), indices tensor([1568, 2653, 80, ..., 720, 251, 2992])\n", + "deriv_tensor (40, 81), indices tensor([1568, 2653, 80, ..., 720, 251, 2992])\n", + "deriv_tensor (40, 81), indices tensor([1568, 2653, 80, ..., 720, 251, 2992])\n", + "deriv_tensor (40, 81), indices tensor([1568, 2653, 80, ..., 720, 251, 2992])\n", + "deriv_tensor (40, 81), indices tensor([1144, 1906, 3023, ..., 704, 2288, 58])\n", + 
"deriv_tensor (40, 81), indices tensor([1144, 1906, 3023, ..., 704, 2288, 58])\n", + "deriv_tensor (40, 81), indices tensor([1144, 1906, 3023, ..., 704, 2288, 58])\n", + "deriv_tensor (40, 81), indices tensor([1144, 1906, 3023, ..., 704, 2288, 58])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 1542, 670, ..., 1444, 2807, 2318])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 1542, 670, ..., 1444, 2807, 2318])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 1542, 670, ..., 1444, 2807, 2318])\n", + "deriv_tensor (40, 81), indices tensor([ 516, 1542, 670, ..., 1444, 2807, 2318])\n", + "deriv_tensor (40, 81), indices tensor([2213, 2070, 187, ..., 3002, 1729, 2509])\n", + "deriv_tensor (40, 81), indices tensor([2213, 2070, 187, ..., 3002, 1729, 2509])\n", + "deriv_tensor (40, 81), indices tensor([2213, 2070, 187, ..., 3002, 1729, 2509])\n", + "deriv_tensor (40, 81), indices tensor([2213, 2070, 187, ..., 3002, 1729, 2509])\n", + "deriv_tensor (40, 81), indices tensor([ 940, 442, 1683, ..., 472, 937, 3075])\n", + "deriv_tensor (40, 81), indices tensor([ 940, 442, 1683, ..., 472, 937, 3075])\n", + "deriv_tensor (40, 81), indices tensor([ 940, 442, 1683, ..., 472, 937, 3075])\n", + "deriv_tensor (40, 81), indices tensor([ 940, 442, 1683, ..., 472, 937, 3075])\n", + "deriv_tensor (40, 81), indices tensor([1982, 723, 126, ..., 132, 504, 429])\n", + "deriv_tensor (40, 81), indices tensor([1982, 723, 126, ..., 132, 504, 429])\n", + "deriv_tensor (40, 81), indices tensor([1982, 723, 126, ..., 132, 504, 429])\n", + "deriv_tensor (40, 81), indices tensor([1982, 723, 126, ..., 132, 504, 429])\n", + "deriv_tensor (40, 81), indices tensor([1429, 2765, 2612, ..., 511, 705, 1225])\n", + "deriv_tensor (40, 81), indices tensor([1429, 2765, 2612, ..., 511, 705, 1225])\n", + "deriv_tensor (40, 81), indices tensor([1429, 2765, 2612, ..., 511, 705, 1225])\n", + "deriv_tensor (40, 81), indices tensor([1429, 2765, 2612, ..., 511, 705, 1225])\n", + "deriv_tensor (40, 81), indices tensor([ 982, 141, 864, ..., 1165, 3103, 1517])\n", + "deriv_tensor (40, 81), indices tensor([ 982, 141, 864, ..., 1165, 3103, 1517])\n", + "deriv_tensor (40, 81), indices tensor([ 982, 141, 864, ..., 1165, 3103, 1517])\n", + "deriv_tensor (40, 81), indices tensor([ 982, 141, 864, ..., 1165, 3103, 1517])\n", + "deriv_tensor (40, 81), indices tensor([1047, 2853, 2716, ..., 811, 1505, 2002])\n", + "deriv_tensor (40, 81), indices tensor([1047, 2853, 2716, ..., 811, 1505, 2002])\n", + "deriv_tensor (40, 81), indices tensor([1047, 2853, 2716, ..., 811, 1505, 2002])\n", + "deriv_tensor (40, 81), indices tensor([1047, 2853, 2716, ..., 811, 1505, 2002])\n", + "deriv_tensor (40, 81), indices tensor([3204, 834, 2566, ..., 711, 1693, 2252])\n", + "deriv_tensor (40, 81), indices tensor([3204, 834, 2566, ..., 711, 1693, 2252])\n", + "deriv_tensor (40, 81), indices tensor([3204, 834, 2566, ..., 711, 1693, 2252])\n", + "deriv_tensor (40, 81), indices tensor([3204, 834, 2566, ..., 711, 1693, 2252])\n", + "deriv_tensor (40, 81), indices tensor([1360, 2754, 1107, ..., 2938, 2483, 645])\n", + "deriv_tensor (40, 81), indices tensor([1360, 2754, 1107, ..., 2938, 2483, 645])\n", + "deriv_tensor (40, 81), indices tensor([1360, 2754, 1107, ..., 2938, 2483, 645])\n", + "deriv_tensor (40, 81), indices tensor([1360, 2754, 1107, ..., 2938, 2483, 645])\n", + "deriv_tensor (40, 81), indices tensor([1840, 2840, 2390, ..., 2027, 44, 1006])\n", + "deriv_tensor (40, 81), indices tensor([1840, 2840, 2390, ..., 2027, 44, 1006])\n", + "deriv_tensor (40, 81), indices 
tensor([1840, 2840, 2390, ..., 2027, 44, 1006])\n", + "deriv_tensor (40, 81), indices tensor([1840, 2840, 2390, ..., 2027, 44, 1006])\n", + "deriv_tensor (40, 81), indices tensor([2077, 2674, 122, ..., 2949, 1603, 2756])\n", + "deriv_tensor (40, 81), indices tensor([2077, 2674, 122, ..., 2949, 1603, 2756])\n", + "deriv_tensor (40, 81), indices tensor([2077, 2674, 122, ..., 2949, 1603, 2756])\n", + "deriv_tensor (40, 81), indices tensor([2077, 2674, 122, ..., 2949, 1603, 2756])\n", + "deriv_tensor (40, 81), indices tensor([2713, 79, 2886, ..., 2947, 246, 1624])\n", + "deriv_tensor (40, 81), indices tensor([2713, 79, 2886, ..., 2947, 246, 1624])\n", + "deriv_tensor (40, 81), indices tensor([2713, 79, 2886, ..., 2947, 246, 1624])\n", + "deriv_tensor (40, 81), indices tensor([2713, 79, 2886, ..., 2947, 246, 1624])\n", + "deriv_tensor (40, 81), indices tensor([2058, 1970, 2682, ..., 3052, 1417, 35])\n", + "deriv_tensor (40, 81), indices tensor([2058, 1970, 2682, ..., 3052, 1417, 35])\n", + "deriv_tensor (40, 81), indices tensor([2058, 1970, 2682, ..., 3052, 1417, 35])\n", + "deriv_tensor (40, 81), indices tensor([2058, 1970, 2682, ..., 3052, 1417, 35])\n", + "deriv_tensor (40, 81), indices tensor([2621, 1488, 2347, ..., 84, 1778, 2994])\n", + "deriv_tensor (40, 81), indices tensor([2621, 1488, 2347, ..., 84, 1778, 2994])\n", + "deriv_tensor (40, 81), indices tensor([2621, 1488, 2347, ..., 84, 1778, 2994])\n", + "deriv_tensor (40, 81), indices tensor([2621, 1488, 2347, ..., 84, 1778, 2994])\n", + "deriv_tensor (40, 81), indices tensor([2609, 873, 1961, ..., 2749, 3038, 2263])\n", + "deriv_tensor (40, 81), indices tensor([2609, 873, 1961, ..., 2749, 3038, 2263])\n", + "deriv_tensor (40, 81), indices tensor([2609, 873, 1961, ..., 2749, 3038, 2263])\n", + "deriv_tensor (40, 81), indices tensor([2609, 873, 1961, ..., 2749, 3038, 2263])\n", + "deriv_tensor (40, 81), indices tensor([1103, 1430, 2993, ..., 1732, 1570, 822])\n", + "deriv_tensor (40, 81), indices tensor([1103, 1430, 2993, ..., 1732, 1570, 822])\n", + "deriv_tensor (40, 81), indices tensor([1103, 1430, 2993, ..., 1732, 1570, 822])\n", + "deriv_tensor (40, 81), indices tensor([1103, 1430, 2993, ..., 1732, 1570, 822])\n", + "deriv_tensor (40, 81), indices tensor([1421, 2089, 305, ..., 298, 1936, 1730])\n", + "deriv_tensor (40, 81), indices tensor([1421, 2089, 305, ..., 298, 1936, 1730])\n", + "deriv_tensor (40, 81), indices tensor([1421, 2089, 305, ..., 298, 1936, 1730])\n", + "deriv_tensor (40, 81), indices tensor([1421, 2089, 305, ..., 298, 1936, 1730])\n", + "deriv_tensor (40, 81), indices tensor([ 392, 1337, 1236, ..., 1574, 2087, 2080])\n", + "deriv_tensor (40, 81), indices tensor([ 392, 1337, 1236, ..., 1574, 2087, 2080])\n", + "deriv_tensor (40, 81), indices tensor([ 392, 1337, 1236, ..., 1574, 2087, 2080])\n", + "deriv_tensor (40, 81), indices tensor([ 392, 1337, 1236, ..., 1574, 2087, 2080])\n", + "deriv_tensor (40, 81), indices tensor([ 945, 2363, 1764, ..., 779, 867, 236])\n", + "deriv_tensor (40, 81), indices tensor([ 945, 2363, 1764, ..., 779, 867, 236])\n", + "deriv_tensor (40, 81), indices tensor([ 945, 2363, 1764, ..., 779, 867, 236])\n", + "deriv_tensor (40, 81), indices tensor([ 945, 2363, 1764, ..., 779, 867, 236])\n", + "deriv_tensor (40, 81), indices tensor([2644, 2932, 2404, ..., 398, 321, 402])\n", + "deriv_tensor (40, 81), indices tensor([2644, 2932, 2404, ..., 398, 321, 402])\n", + "deriv_tensor (40, 81), indices tensor([2644, 2932, 2404, ..., 398, 321, 402])\n", + "deriv_tensor (40, 81), indices tensor([2644, 2932, 
2404, ..., 398, 321, 402])\n", + "deriv_tensor (40, 81), indices tensor([ 796, 1993, 1151, ..., 3118, 1628, 794])\n", + "deriv_tensor (40, 81), indices tensor([ 796, 1993, 1151, ..., 3118, 1628, 794])\n", + "deriv_tensor (40, 81), indices tensor([ 796, 1993, 1151, ..., 3118, 1628, 794])\n", + "deriv_tensor (40, 81), indices tensor([ 796, 1993, 1151, ..., 3118, 1628, 794])\n", + "deriv_tensor (40, 81), indices tensor([1238, 89, 3108, ..., 1016, 29, 1463])\n", + "deriv_tensor (40, 81), indices tensor([1238, 89, 3108, ..., 1016, 29, 1463])\n", + "deriv_tensor (40, 81), indices tensor([1238, 89, 3108, ..., 1016, 29, 1463])\n", + "deriv_tensor (40, 81), indices tensor([1238, 89, 3108, ..., 1016, 29, 1463])\n", + "deriv_tensor (40, 81), indices tensor([2302, 2167, 2805, ..., 1775, 790, 381])\n", + "deriv_tensor (40, 81), indices tensor([2302, 2167, 2805, ..., 1775, 790, 381])\n", + "deriv_tensor (40, 81), indices tensor([2302, 2167, 2805, ..., 1775, 790, 381])\n", + "deriv_tensor (40, 81), indices tensor([2302, 2167, 2805, ..., 1775, 790, 381])\n", + "deriv_tensor (40, 81), indices tensor([3181, 723, 1162, ..., 2467, 2664, 1632])\n", + "deriv_tensor (40, 81), indices tensor([3181, 723, 1162, ..., 2467, 2664, 1632])\n", + "deriv_tensor (40, 81), indices tensor([3181, 723, 1162, ..., 2467, 2664, 1632])\n", + "deriv_tensor (40, 81), indices tensor([3181, 723, 1162, ..., 2467, 2664, 1632])\n", + "deriv_tensor (40, 81), indices tensor([2100, 3176, 2717, ..., 2176, 784, 1269])\n", + "deriv_tensor (40, 81), indices tensor([2100, 3176, 2717, ..., 2176, 784, 1269])\n", + "deriv_tensor (40, 81), indices tensor([2100, 3176, 2717, ..., 2176, 784, 1269])\n", + "deriv_tensor (40, 81), indices tensor([2100, 3176, 2717, ..., 2176, 784, 1269])\n", + "deriv_tensor (40, 81), indices tensor([2029, 1273, 2796, ..., 133, 56, 266])\n", + "deriv_tensor (40, 81), indices tensor([2029, 1273, 2796, ..., 133, 56, 266])\n", + "deriv_tensor (40, 81), indices tensor([2029, 1273, 2796, ..., 133, 56, 266])\n", + "deriv_tensor (40, 81), indices tensor([2029, 1273, 2796, ..., 133, 56, 266])\n", + "deriv_tensor (40, 81), indices tensor([2697, 3031, 3180, ..., 664, 1210, 1711])\n", + "deriv_tensor (40, 81), indices tensor([2697, 3031, 3180, ..., 664, 1210, 1711])\n", + "deriv_tensor (40, 81), indices tensor([2697, 3031, 3180, ..., 664, 1210, 1711])\n", + "deriv_tensor (40, 81), indices tensor([2697, 3031, 3180, ..., 664, 1210, 1711])\n", + "deriv_tensor (40, 81), indices tensor([1406, 1865, 1741, ..., 1920, 1241, 1998])\n", + "deriv_tensor (40, 81), indices tensor([1406, 1865, 1741, ..., 1920, 1241, 1998])\n", + "deriv_tensor (40, 81), indices tensor([1406, 1865, 1741, ..., 1920, 1241, 1998])\n", + "deriv_tensor (40, 81), indices tensor([1406, 1865, 1741, ..., 1920, 1241, 1998])\n", + "deriv_tensor (40, 81), indices tensor([ 430, 247, 2221, ..., 3091, 2380, 2488])\n", + "deriv_tensor (40, 81), indices tensor([ 430, 247, 2221, ..., 3091, 2380, 2488])\n", + "deriv_tensor (40, 81), indices tensor([ 430, 247, 2221, ..., 3091, 2380, 2488])\n", + "deriv_tensor (40, 81), indices tensor([ 430, 247, 2221, ..., 3091, 2380, 2488])\n", + "deriv_tensor (40, 81), indices tensor([2450, 2392, 357, ..., 1534, 1031, 2847])\n", + "deriv_tensor (40, 81), indices tensor([2450, 2392, 357, ..., 1534, 1031, 2847])\n", + "deriv_tensor (40, 81), indices tensor([2450, 2392, 357, ..., 1534, 1031, 2847])\n", + "deriv_tensor (40, 81), indices tensor([2450, 2392, 357, ..., 1534, 1031, 2847])\n", + "deriv_tensor (40, 81), indices tensor([ 482, 918, 1833, ..., 
3035, 1572, 2879])\n", + "deriv_tensor (40, 81), indices tensor([ 482, 918, 1833, ..., 3035, 1572, 2879])\n", + "deriv_tensor (40, 81), indices tensor([ 482, 918, 1833, ..., 3035, 1572, 2879])\n", + "deriv_tensor (40, 81), indices tensor([ 482, 918, 1833, ..., 3035, 1572, 2879])\n", + "deriv_tensor (40, 81), indices tensor([ 607, 1147, 1964, ..., 348, 2624, 1593])\n", + "deriv_tensor (40, 81), indices tensor([ 607, 1147, 1964, ..., 348, 2624, 1593])\n", + "deriv_tensor (40, 81), indices tensor([ 607, 1147, 1964, ..., 348, 2624, 1593])\n", + "deriv_tensor (40, 81), indices tensor([ 607, 1147, 1964, ..., 348, 2624, 1593])\n", + "deriv_tensor (40, 81), indices tensor([1125, 2463, 952, ..., 1548, 547, 944])\n", + "deriv_tensor (40, 81), indices tensor([1125, 2463, 952, ..., 1548, 547, 944])\n", + "deriv_tensor (40, 81), indices tensor([1125, 2463, 952, ..., 1548, 547, 944])\n", + "deriv_tensor (40, 81), indices tensor([1125, 2463, 952, ..., 1548, 547, 944])\n", + "deriv_tensor (40, 81), indices tensor([1565, 2960, 943, ..., 1118, 1605, 2585])\n", + "deriv_tensor (40, 81), indices tensor([1565, 2960, 943, ..., 1118, 1605, 2585])\n", + "deriv_tensor (40, 81), indices tensor([1565, 2960, 943, ..., 1118, 1605, 2585])\n", + "deriv_tensor (40, 81), indices tensor([1565, 2960, 943, ..., 1118, 1605, 2585])\n", + "deriv_tensor (40, 81), indices tensor([1661, 391, 2493, ..., 1943, 1767, 1746])\n", + "deriv_tensor (40, 81), indices tensor([1661, 391, 2493, ..., 1943, 1767, 1746])\n", + "deriv_tensor (40, 81), indices tensor([1661, 391, 2493, ..., 1943, 1767, 1746])\n", + "deriv_tensor (40, 81), indices tensor([1661, 391, 2493, ..., 1943, 1767, 1746])\n", + "deriv_tensor (40, 81), indices tensor([ 941, 1370, 1613, ..., 2891, 2497, 2454])\n", + "deriv_tensor (40, 81), indices tensor([ 941, 1370, 1613, ..., 2891, 2497, 2454])\n", + "deriv_tensor (40, 81), indices tensor([ 941, 1370, 1613, ..., 2891, 2497, 2454])\n", + "deriv_tensor (40, 81), indices tensor([ 941, 1370, 1613, ..., 2891, 2497, 2454])\n", + "deriv_tensor (40, 81), indices tensor([2395, 3151, 2754, ..., 3003, 631, 879])\n", + "deriv_tensor (40, 81), indices tensor([2395, 3151, 2754, ..., 3003, 631, 879])\n", + "deriv_tensor (40, 81), indices tensor([2395, 3151, 2754, ..., 3003, 631, 879])\n", + "deriv_tensor (40, 81), indices tensor([2395, 3151, 2754, ..., 3003, 631, 879])\n", + "deriv_tensor (40, 81), indices tensor([2037, 632, 1365, ..., 1354, 1708, 997])\n", + "deriv_tensor (40, 81), indices tensor([2037, 632, 1365, ..., 1354, 1708, 997])\n", + "deriv_tensor (40, 81), indices tensor([2037, 632, 1365, ..., 1354, 1708, 997])\n", + "deriv_tensor (40, 81), indices tensor([2037, 632, 1365, ..., 1354, 1708, 997])\n", + "deriv_tensor (40, 81), indices tensor([ 999, 2668, 796, ..., 1083, 1532, 1508])\n", + "deriv_tensor (40, 81), indices tensor([ 999, 2668, 796, ..., 1083, 1532, 1508])\n", + "deriv_tensor (40, 81), indices tensor([ 999, 2668, 796, ..., 1083, 1532, 1508])\n", + "deriv_tensor (40, 81), indices tensor([ 999, 2668, 796, ..., 1083, 1532, 1508])\n", + "min loss is 1260.7212524414062, in last epoch: [1189.7750244140625, 1331.66748046875], \n", + "The cardinality of defined token pool is [1 4 2 2]\n", + "Among them, the pool contains [1 4]\n", + "self.vars_demand_equation {'u'}\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 1/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 2/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + 
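The use_ann=True run above appears to fit a small fully-connected network to the data and take derivatives from that smooth surrogate; the repeated "deriv_tensor (40, 81), indices tensor([...])" lines are its training batches, with indices drawn from the 3240 (= 40 x 81) flattened grid points. Below is a minimal sketch of one way such derivatives can be obtained with torch.autograd; this is an illustration, not EPDE's internal code, and only the architecture mirrors the solution_guess_nn printed further down:

import torch

# Surrogate u(t, x): same layout as the Sequential shown in the next cells.
net = torch.nn.Sequential(
    torch.nn.Linear(2, 112), torch.nn.Tanh(),
    torch.nn.Linear(112, 112), torch.nn.Tanh(),
    torch.nn.Linear(112, 1),
)

pts = torch.rand(8, 2, requires_grad=True)   # batch of (t, x) sample points
u = net(pts).sum()                           # scalar output, so grad() is well-defined
derivs = torch.autograd.grad(u, pts, create_graph=True)[0]  # columns: du/dt, du/dx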
"New solution accepted, confirmed 3/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 4/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 5/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 6/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 7/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 8/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 9/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 10/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 11/12 solutions.\n", + "Creating new equation, sparsity value 0.5\n", + "New solution accepted, confirmed 12/12 solutions.\n", + "The optimization has been conducted.\n" + ] + } + ], + "source": [ + "epde_search_obj = epde_discovery(grids_training, data_training, derivs = None, use_ann=True,\n", + " multiobjective_mode = False)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "9fba80c0", + "metadata": {}, + "outputs": [], + "source": [ + "import epde.globals as global_var" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "16f5c0e1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Sequential(\n", + " (0): Linear(in_features=2, out_features=112, bias=True)\n", + " (1): Tanh()\n", + " (2): Linear(in_features=112, out_features=112, bias=True)\n", + " (3): Tanh()\n", + " (4): Linear(in_features=112, out_features=1, bias=True)\n", + ")" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "global_var.solution_guess_nn" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "0ab986bb", + "metadata": {}, + "outputs": [], + "source": [ + "import pickle\n", + "fname = os.path.join('C:\\\\Users\\\\Mike\\\\Documents\\\\Work\\\\EPDE\\\\projects\\\\misc\\\\data', 'ann_pretrained.pickle')\n", + "with open(fname, 'wb') as output_file:\n", + " pickle.dump(epde.globals.solution_guess_nn, output_file)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "7d8c292f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.0 * du/dx1{power: 1.0} + 0.04010590037618242 * d^2u/dx1^2{power: 1.0} * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} + -0.063220572205291 = d^2u/dx0^2{power: 1.0} * u{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': False, 'value': 0.5}} , with objective function values of [0.0497236] \n", + "\n" + ] + } + ], + "source": [ + "epde_search_obj.equations()" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "b799e138", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(3240, 4)" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ders = epde_search_obj.saved_derivaties\n", + "ders['u'].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "b4485b24", + "metadata": {}, + "outputs": [ + { + "name": 
"stdout", + "output_type": "stream", + "text": [ + "setting builder with \n", + "setting builder with \n", + "trig_token_params: VALUES = (0, 1)\n", + "Deriv orders after definition [[0], [0, 0], [1], [1, 1]]\n", + "initial_shape (40, 81) derivs_tensor.shape (3240, 4)\n", + "self.tokens is ['du/dx0', 'd^2u/dx0^2', 'du/dx1', 'd^2u/dx1^2']\n", + "Here, derivs order is {'du/dx0': [0], 'd^2u/dx0^2': [0, 0], 'du/dx1': [1], 'd^2u/dx1^2': [1, 1]}\n", + "self.tokens is ['u']\n", + "Here, derivs order is {'u': [None]}\n", + "The cardinality of defined token pool is [1 4 2 2]\n", + "Among them, the pool contains [1 4]\n", + "self.vars_demand_equation {'u'}\n", + "Creating new equation, sparsity value [4.25184543e-07]\n", + "New solution accepted, confirmed 1/12 solutions.\n", + "Creating new equation, sparsity value [0.51710179]\n", + "New solution accepted, confirmed 2/12 solutions.\n", + "Creating new equation, sparsity value [0.70789877]\n", + "New solution accepted, confirmed 3/12 solutions.\n", + "Creating new equation, sparsity value [0.15955379]\n", + "New solution accepted, confirmed 4/12 solutions.\n", + "Creating new equation, sparsity value [0.00067337]\n", + "New solution accepted, confirmed 5/12 solutions.\n", + "Creating new equation, sparsity value [1.5963121e-07]\n", + "New solution accepted, confirmed 6/12 solutions.\n", + "Creating new equation, sparsity value [0.0336304]\n", + "New solution accepted, confirmed 7/12 solutions.\n", + "Creating new equation, sparsity value [0.00076552]\n", + "New solution accepted, confirmed 8/12 solutions.\n", + "Creating new equation, sparsity value [0.11985522]\n", + "New solution accepted, confirmed 9/12 solutions.\n", + "Creating new equation, sparsity value [1.54394265e-08]\n", + "New solution accepted, confirmed 10/12 solutions.\n", + "Creating new equation, sparsity value [0.08059209]\n", + "New solution accepted, confirmed 11/12 solutions.\n", + "Creating new equation, sparsity value [1.34773717e-06]\n", + "New solution accepted, confirmed 12/12 solutions.\n", + "[0.48, 0.52] [[0.48, 0.52], [0.2, 0.8], [0.46, 0.54], [0.44, 0.56], [0.98, 0.020000000000000018], [0.7000000000000001, 0.29999999999999993], [0.0, 1.0]]\n", + "[0.7000000000000001, 0.29999999999999993] [[0.48, 0.52], [0.2, 0.8], [0.46, 0.54], [0.44, 0.56], [0.98, 0.020000000000000018], [0.7000000000000001, 0.29999999999999993], [0.0, 1.0], [0.92, 0.07999999999999996], [0.84, 0.16000000000000003], [0.16, 0.84]]\n", + "best_obj 2\n", + "Multiobjective optimization : 0-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 1-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 
10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 2-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 3-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 4-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 5-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 6-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 7-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 8-th epoch.\n", + "During MO : processing 0-th 
weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 9-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 10-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 11-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 12-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 13-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 14-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : 
processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 15-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 16-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 17-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 18-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 19-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 20-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th 
weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 21-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 22-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 23-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 24-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 25-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 26-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + 
"During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 27-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 28-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 29-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 30-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 31-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 32-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th 
weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 33-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 34-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 35-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 36-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 37-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 38-th epoch.\n", + "During MO : processing 0-th weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "Multiobjective optimization : 39-th epoch.\n", + "During MO : processing 0-th 
weight.\n", + "During MO : processing 1-th weight.\n", + "During MO : processing 2-th weight.\n", + "During MO : processing 3-th weight.\n", + "During MO : processing 4-th weight.\n", + "During MO : processing 5-th weight.\n", + "During MO : processing 6-th weight.\n", + "During MO : processing 7-th weight.\n", + "During MO : processing 8-th weight.\n", + "During MO : processing 9-th weight.\n", + "During MO : processing 10-th weight.\n", + "During MO : processing 11-th weight.\n", + "The optimization has been conducted.\n" + ] + } + ], + "source": [ + "epde_search_obj = epde_discovery(grids_training, data_training, derivs = [ders['u']],\n", + " multiobjective_mode = True)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "f6725b74", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "0-th non-dominated level\n", + "\n", + "\n", + "-0.5496878845172918 * u{power: 1.0} * t{power: 1.0} + -0.05112489217014476 * u{power: 1.0} + 0.0 * du/dx1{power: 1.0} * x{power: 1.0} + 0.0 * du/dx1{power: 1.0} + 0.7892463027250493 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0020510539676752717}} , with objective function values of [0.64180359 2.5 ] \n", + "\n", + "0.0 * d^2u/dx0^2{power: 1.0} * du/dx1{power: 1.0} + 0.039994341975063886 * d^2u/dx1^2{power: 1.0} + 0.0 * du/dx1{power: 1.0} + 0.0 * u{power: 1.0} * du/dx1{power: 1.0} + -0.02059504177543492 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0005922902356125623}} , with objective function values of [0.00960299 4. 
] \n", + "\n", + "0.0 * du/dx1{power: 1.0} + 0.0 * u{power: 1.0} * du/dx1{power: 1.0} + -0.18452312183899572 * u{power: 1.0} + 0.0 * du/dx1{power: 1.0} * d^2u/dx1^2{power: 1.0} + 0.7864096452453555 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.002190874557986789}} , with objective function values of [2.1143071 1.5 ] \n", + "\n", + "-0.18452312183899572 * u{power: 1.0} + 0.0 * du/dx1{power: 1.0} + 0.0 * du/dx0{power: 1.0} * du/dx1{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} * u{power: 1.0} + 0.7864096452453555 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.004001307668963563}} , with objective function values of [2.1143071 1.5 ] \n", + "\n", + "0.0 * du/dx1{power: 1.0} + 0.03986724979300948 * d^2u/dx1^2{power: 1.0} + -0.003790903741304338 * u{power: 1.0} + 0.0 * u{power: 1.0} * x{power: 1.0} + -0.009668142992528914 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.005028842968265235}} , with objective function values of [0.00842261 4.5 ] \n", + "\n", + "0.0 * du/dx1{power: 1.0} + 0.0 * d^2u/dx0^2{power: 1.0} * du/dx0{power: 1.0} + -0.18452312183899572 * u{power: 1.0} + 0.0 * u{power: 1.0} * du/dx1{power: 1.0} + 0.7864096452453555 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.003171661595486404}} , with objective function values of [2.1143071 1.5 ] \n", + "\n", + "0.0 * du/dx1{power: 1.0} + -0.18452312183899572 * u{power: 1.0} + 0.0 * du/dx0{power: 1.0} * u{power: 1.0} + 0.0 * du/dx1{power: 1.0} * x{power: 1.0} + 0.7864096452453555 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0022888546468261698}} , with objective function values of [2.1143071 1.5 ] \n", + "\n", + "0.0 * d^2u/dx1^2{power: 1.0} * du/dx1{power: 1.0} + 0.3170861385853261 * du/dx0{power: 1.0} + 0.0 * du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.5792977481576362, dim: 0.0} + 0.012091286071880938 * u{power: 1.0} + -0.053694967683941545 = du/dx0{power: 1.0} * t{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.006472967327617509}} , with objective function values of [0.10427561 3. 
] \n", + "\n", + "0.0 * du/dx1{power: 1.0} * cos{power: 1.0, freq: 2.810542685823843, dim: 0.0} + 0.0 * du/dx1{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * du/dx1{power: 1.0} + 0.039994341975063886 * d^2u/dx1^2{power: 1.0} + -0.02059504177543492 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.003201153918935575}} , with objective function values of [0.00960299 4. ] \n", + "\n", + "0.03986724979300948 * d^2u/dx1^2{power: 1.0} + 0.0 * du/dx1{power: 1.0} + -0.003790903741304338 * u{power: 1.0} + 0.0 * du/dx1{power: 1.0} * d^2u/dx1^2{power: 1.0} + -0.009668142992528914 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0025273391878526505}} , with objective function values of [0.00842261 4.5 ] \n", + "\n", + "-0.003790903741304279 * u{power: 1.0} + 0.0 * du/dx1{power: 1.0} + 0.03986724979300945 * d^2u/dx1^2{power: 1.0} + 0.0 * du/dx1{power: 1.0} * d^2u/dx0^2{power: 1.0} + -0.009668142992528446 = d^2u/dx0^2{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0025868099968101555}} , with objective function values of [0.00842261 4.5 ] \n", + "\n", + "-0.18452312183899572 * u{power: 1.0} + 0.0 * du/dx1{power: 1.0} + 0.0 * du/dx1{power: 1.0} * d^2u/dx0^2{power: 1.0} + 0.0 * d^2u/dx1^2{power: 1.0} * du/dx0{power: 1.0} + 0.7864096452453555 = du/dx0{power: 1.0}\n", + "{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.0031414337806716383}} , with objective function values of [2.1143071 1.5 ] \n", + "\n" + ] + } + ], + "source": [ + "epde_search_obj.equations()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "474161b7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} = -5.497\\cdot 10^{-1} u \\cdot t{power: 1.0} + -5.112\\cdot 10^{-2} u + 7.892\\cdot 10^{-1} \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} = 3.999\\cdot 10^{-2} \\frac{\\partial ^2u}{\\partial x_1^2} + -2.06\\cdot 10^{-2} \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} = -1.845\\cdot 10^{-1} u + 7.864\\cdot 10^{-1} \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial ^2u}{\\partial x_0^2} = 3.987\\cdot 10^{-2} \\frac{\\partial ^2u}{\\partial x_1^2} + -3.791\\cdot 10^{-3} u + -9.668\\cdot 10^{-3} \\end{eqnarray*}$\n", + "$\\begin{eqnarray*} \\frac{\\partial u}{\\partial x_0} \\cdot t{power: 1.0} = 3.171\\cdot 10^{-1} \\frac{\\partial u}{\\partial x_0} + 1.209\\cdot 10^{-2} u + -5.369\\cdot 10^{-2} \\end{eqnarray*}$\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAApEAAAGyCAYAAAC8+5/OAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAABiCUlEQVR4nO3dT4zj2H0n8G/NpNM7PXAXSx1gEdu966acc7BSNZBDEuy6yTHQybTjsVR1MhBkPRKSS4KJI45yWQywgFqKN0j2kFgsr7HAIMhWieNMJkgDbrF9SHzYRUm07xmxJuvYvkxRr2rQ7e0uzGgPymOL+ktKqiKl+n6AwbSof6/IR/Kn9+f3Nnq9Xg9ERERERBG8EHcBiIiIiGj1MIgkIiIiosgYRBIRERFRZAwiiYiIiCgyBpFEREREFBmDSCIiIiKKjEEkEREREUXGIJKIiIiIIvu5uAtw0T755BP85Cc/wac+9SlsbGzEXRwiIiIKodfr4aOPPsKnP/1pvPAC28CS4NIFkT/5yU9w8+bNuItBREREc/jRj36Ez372s3EXg3AJg8hPfepTAPqV8Pr16zGXpu/s7AwPHz7EK6+8gitXrsRdnETjvgqP+yo87qvwuK/C474KL8y+Oj09xc2bN/37OMXv0gWRsgv7+vXriQoir127huvXr/NCMwP3VXjcV+FxX4XHfRUe91V4UfYVh6IlBwcVEBEREVFkDCKJiIiIKDIGkUREREQUGYNIIiIiIoqMQSQRERERRcYgkoiIiIgiYxBJRERERJExiCQiIiKiyBhEEhEREVFkDCKJiIiIKDIGkUREREQUGYNIIiIiIoqMQSQRERERRcYgkoiIiIgiYxBJRERERJExiCQiIiKiyBhEEhEREVFkDCKJiIiIKDIGkUREREQUGYNIIiIiIoqMQSQRERERRcYgkoiIiIgiYxBJRERERJExiCQiIiKiyBhEEhEREVFkDCKJiIiIKDIGkUREREQU2c/F+eW2baNer0PXdaiqimazidu3byOXy019X61Wg6IoAAAhBEql0gWUloiIiIikWFsihRCwbRvFYhHFYhHpdDpUAAkAhUIBhUIBmUwGxWLxIopLRERERP9qo9fr9eL6csuyoGma36oYxtbWFo6OjgLv2djYQNg/4/T0FJubmzg5OcH169cjlvh8nJ2d4cGDB7h79y6uXLlybt/z1a9+FScnJ+f2+Reh1+vh8ePHePnll7GxsRF3cRKN+yo87qvwuK/CW/d9tbm5ibfffnspnxXmPpjE+/dlF2t3dlSu60IIMTbotG0bmqZdfKFWyMnJCd577724i0FERGvg3r17cReBYhZ7EHlwcIBUKgXP89DpdFCtVie+1nXdsdsVRYEQYuxzT58+xdOnT/3Hp6enAPq/es7OzuYv+BLJcpx3eWJsdCYiojXT6/WWdt8Kcx9Myj2bnos1iMxkMgAAVVUBAKZpIp/Po9FoRPocGYSOU6lU8NZbb41sf/jwIa5duxaxxOer2Wye6+c/fvz4XD+fiIguj8ePH+PBgwdL/cxp98EnT54s9btocbEGkTJ4lHZ2dlAsFid2WU8yKYAEgHK5jDfeeMN/fHp6ips3b+KVV15JzJiKs7MzNJtN6Lp+rmMiv/Wtby30fsuy4Hke2u028vk8hw9cYqwLRPTyyy/j7t27S/msMPdB2ZNIyRFrEGlZVmA2tgwcXdf1WykHDQedkhBi4nNXr17F1atXR7ZfuXLlXAO2eZx3mcIM7JYz5jOZTGCfOo4DoD8rXgiBW7duodvtnltZKRnG1QfWBSIC+veUZd+zpt0Hk3bPphhT/AghkM/nA+Mc5bjGSQGhqqpQFGXs2Ei2hCzOtm1/gpLruoHUSZ7n+d0MiqIglUr5wQStp0n1gXWBiIiAGINIRVFQKpUCAaNpmsjlcoEWSZkXUiqXy7Bt239sWRYKhcKFlHmdCSHQbDb9/a9pGnRdh2EYAPpBer1e91/ved7Y1mJaD9PqA+sCEREBMScbL5fLqNVq/n/Hx8eBSTVyRZtBpVIJQghYlgXLsnB4eDjyGopOtjoN0jQNpmmOvLZYLGJvb++iikYxCFsfWBeIiC6vWJONxyGJyUovKtn4vXv3IueJ3NjYQLfb9VuHLcsCgJkrC9F6GqwPrAtEl9s895RJmGx8NcWeJ5KSwzRNpFIpAP0uSjnO1PM8KIoC27b9rk3HcaAoClRVhW3bEELA8zx/aIEQAnfu3EG73Y7t76HFTKsPrVZrbF0AwPpARHRJMIgkAICu66hWq4GxbXIihaqqcF0X+Xzef04IgV6vB9d1kUqloKoqdF33gwbbtv0AhFbPtPoAYGxdAMD6QER0icQ6JpKSwTAMZDKZkckRrVbLb31SVRXdbtf/bzBoyGQysCwr8H6Z74tWz6z6MKkuAKwPRESXCYNIQq1Ww+7u7sj24TQ/48ggc39/P/DaVqvFGbsrivWBiIjCYHf2JSdn4A7f4B3HgRAi1KQJIQQcxwnk6hx+TMkwa6UZ1geaxLIsqKqKVqsFAEyttsZ4rCksBpE0Nrl7pVIJnTrJdd2R1W0GJ1kweEiGsCvNsD7QMCEEKpUK2u02VFXF1tYWA4s1xWNNUbA7+5LTNG1k7XGZuiXshUNRlMBa5/v7+35L1rjVhSgeYVaaYX2gcRRF8WfWu67LHwJrjMeaomBLJKHdbsMwDNy4cQNA/yIymPR9FlVVsb29jVqtBlVVsbu7i0qlAtM0+Qs2QTRNC9wQJq00w/pAk5imiWazGak+0GrisaYwmGw8AZKcbHxVCCFwcHAAAOh0OnBdF3t7e4EWsXEGl9U8Pj5GtVoNPG+aJoQQUBQFnU4H5XLZ/8x8Po/d3V1/TfdBk9Z/D/t3NBoNv9VwuLzyu4QQKJVKc31PsViErutMFI756k6YYz+rbs061vP+Heddd2zbRqPRWNmVwmYdl0kMw0A6nQYApFKpkXNn2vPTriPzSMqxZrJxYkskrQXDMGAYhn8DLxaLyOfzU2/O+Xw+kMvQNE0YhuHfVGq1GgqFQuBi/Prrr/u/zB3H8bt6B+Vyubl+vTuOg1ar5SfqHiZvfoO5F4vFon+Bl0uHDrtx40bgJmJZ1soEkMPjK8/DPHVn1rGfVbdmHeuoLqLuyCBI0zTk8/mxE7MWcRHHetZxGUcmyn/06BEURYHjOMhms35qq1nPz7qORLUOx5rWSO+SOTk56QHonZycxF0U37Nnz3rvvvtu79mzZ+f6Pa+++uq5fn6cNE3rVatV/3G1Wu0pijLx9Z1Opweg1+12/W3dbjewTdO0sd8z+B3D6vX6HKUPajQavUwmM7JdUZRAeXu9Xi/qKdxsNnvNZrPX6/V67Xa71+l05i7nRSgUCuf+HVHrjnzNMHnsw9QtadKxntd51Z16vd4rlUr+Y1VVe+12e+5yjnPexzrKcRku1/DxludQmOdnXUfmlYRjvcx7Spj7YBLv35cdJ9bQWmg2m4HWtsPDw6m/nOUEj8EuJflvmdZCURToug4hhP+ewZaS4ZY827axvb29yJ8xtbyydWCYTMsT5jNkq8
LW1hay2ey5t/ysgqh1B5h+7MPUrYu0jLqzs7OD27dvw7ZtGIaBYrEYGE9r2zYsy4Jpmv42IQSy2ezC5V+WeY+LaZrI5XJwXdffX4P1Y9bzs64jy3QRx5poELuzae1YlgUhxNTuInkRH3fBlTebvb09ZLNZbG1toVQqIZ1OB8YGDd4IXNc915mMk2Y1K4ri35xmkSvN0GRh6g4w/diHqVsXaRl1R1EUP3AeruOrstTlPMdFbpdpqlRV9Yc7aJo283lg9nVkmc77WBMNYxBJsZu1CoqUzWanzu6Vg82FEMjn81MHrquqCk3TYNu2f8Ec/qWuKAoMw0Cz2UStVoOmadjZ2Rn7udVqNZaJBqlUailj6i67KHVn2PCxD1O3kmBZdUcG0LVaLdFLXc5zXAZbL+XfVq1W/Ryrs56Xz4W9jpwXXifovDCIvOQ2NjYu5Ht6U5IALCv4UhQlMGB+a2sLR0dHEy/WzWYThmHA8zy/JQV43mJhGAZ0XUej0fC7grPZLDqdTuBzhnMtTmJZFgzDGHn/IpZ9Y4izPoz7MSEnBQyaNinIsizs7+/P/P5yuRwIeKLWHWnSsZ9Vt6I4j3oDLK/uDC51OThBpdVqIZ/Pj31PXMd63uMyOExFtuoNBqCTntc0LfR1RP5NST7WRMMYRF5y04K7VSFXWBhMm6Fpmn8hnzYLefCmJ7t7tre3/bFFg12U7XYb2WwWlmUFPrNer/upPaZRVXXusUWTbnJCiKWOr4qzPoz7MTE4qzSMXC4Xadb5InVHlnnSsZ9Ut6JapN7I94+zzLoTdanLOI61FOW4TNo/iqJMHb4in49yHZHPJ/1YEw1iEEmxW7Q723Vd1Go1FIvFQBoNAFNbkhzHGZkckMvloCgKWq3W2PdOakEJM4Egk8nMndZD5iMcNyif45bmN2/dkSYd+2l1K6pF6g1wMXVnVZa6jHpc5DhH13UD7xNCYHt7e+bzruuGvo4Aq3GsiQYxiKSlsiwLnueh3W6Hzi22aHd2JpNBqVQKXDTlUnvy+13XhWVZgVm4+Xwe9Xrdf029XvfLomkaqtXqyCD8drs9Ut5lz7ac1PVULpdh27YfSFuWlegVYOapCxdt3rojTTr20+rWoGV3M8ZVd5QVWepy1nEZd6yr1Wrg77EsC5qmBcZAzno+zHUkqnW5TtBqYxBJAbIbL5PJRA6M5PiwQqEAIURgcPl5K5fLgZUohBB49OiR/9i2bdTr9cDNoV6vw3EcuK6LTqeDer0e+JsbjQYqlQpu3Ljhj3Mal5RYVdWlzEKVN7D9/X04jgPDMHD79m2/y6tUKqFWq/lJrg8PD899Ms+89SHOuhDVPHVHmnTsZ9WtWcc6qrjrzqosdTnruIw71rlcDp7n+XXk+Pg4kIh+1vNhryNhxX2siQLiTVN58ZKYrDQpycabzWav0Wj0ut1ur9lsRk7+O/ye80hITBdnkfqwrLpwEcnGKRl4rFcPk40Tk40TgH7rS7PZ9McHaZoGXddhGEboz9A0LfCL1/M8JqldUYvWh2XVhSj1j1YbjzXR6mEQSQD63TjDOdM0TQusQBFFsVjE3t7eMopGMVhmfVikLnBG6eXBY020ehhEEoD+uJ52ux3YJsfvhF3pQLIsa2p+N0q+ZdUH1gUiovXFiTXkM03TnyTgeZ4/g9HzPH9moRysLZP22raNTqeDYrEIVVVh27bf/ek4DhRFYQvDippVH1gXiIguNwaRBKC/MkS1Wg2MW5O5zOSN3zRNf7kuGTg0Gg3ouu7Pxh1coUIIsRbJzC+jWfWBdYGIiBhEEgzDQCaTGZn40Gq1Arn9UqmU3yLpuq4fVAyms0hqGhcKL0x9YF0gIiIGkYRarTYy/g3oBwflctl/PDiurdVqLbSyAiVXmPrAukBERAwiLzk5A3e41clxHAghxk6IkCtQjFvOS46T8zwPqqomcpUSmixqfZhWF4QQ/mzucYm6iYhotXF2No2d7FCpVCaucjC8Fq4MPFzX9XMLFgqFhVZloPhEqQ+T6oL89/Hx8fkUkoiIYscg8pLTNG1kDVbZmji4XJlM1QL0x73JWbuDKV/kbFxJUZSRXIOUbGHqQ5i6APS7vNPp9AWUmoiI4sDubEK73YZhGLhx4waAfvA3PMZNVVXoug7LsrC3twfDMPzgQnZxdjod/zOA/uSLqDkmKX6z6kOYukBEROuPQSRBVdWZXc/Ds3UndXUPG27VouSbVR/mrQtERLReEtWdLbvIprFtG/l8HqZpwrbtQCvISvrZz4Cvf73/769/vf94RQ13XcrJNURERLR+EhNEWpYVavycEAK2baNYLKJYLCKdTq9uF9pv/RZw7Rog1xXe2+s//q3firNUc9M0DYeHh/5j13U5O5uIiGhNJaI7WwgRqdvz6OhobEqRlfJbvwX87d+Of+5v/7b//LvvLvUrf/rTny7184apqord3V1YlgXP8wI5JunysW0bzWYTQgioqrq6P/aIiGisRASRBwcH2NnZ8Ve9WHs/+9nkAFL627/tv+6ll5b2tZ988snSPmsSBgokaZrGlmgiojUWexA5nGcujIODA6RSKXieh06nM3USwNOnT/H06VP/8enpKQDg7OwMZ2dn8xV6UW++GQgOz/7132fDAeObbwLf+MZFloyIiCiUXq+3tPuo/JxpnxfbPZsmij2IlF1dYVPByFmhcsKGaZrI5/MTl12rVCp46623RrY/fPgQ165dm6/Qi/rCF/r/DWl++9ujr33wYGlfexEtkUREdDk8fvwYD5Z4jwL6uWcnefLkyVK/ixa30ev1enF9uWmafgJjIQS2trYQtTjyfd1ud+w4yXEtkTdv3sSHH36I69evL1T+uX39688n06DfAtn89reh/87v4Mrg7OzXX19qS+Sv/MqvjF0TmYiIKKpXX30V3/nOd5byWWdnZ2g2m9B1HVeuXBn7mtPTU/zCL/wCTk5O4rt/U0BsLZGO42B7ezvy+yzLCoy7k4Gj67oj6/0CwNWrV3H16tWR7VeuXJlYUc/d/fvAf//vI5uv/OxnwSDy/n0grjISERFNsbGxsfT76LR7c2z3bJootiDS8zw4juOn9el0OgCAWq02cSanEAL5fB6dTsfvzpbd4CuVj/Cll4AvfWn65JovfWmpk2qAfqvsvXv3Jj7/5MkTfPzxx3j55Zfx//7f/8NHH32Ef/tv/y2ePHni/xvoB+yf/vSn8W/+zb9ZavkoOSbVBfkc68Pl9eMf/xibm5t4+eWXsbGxEXdxEq3X6+Hx48dru682NzfjLgLFrZcQ7Xa7N1ycTqfTq1argW2lUinwuFqt9nK5XOjvOTk56QHonZyczF/YZfnSl3o9oPfspZd67777bu/ZSy/1ekB/+zl49dVXJz7X7XZH9m2j0RjZ1uv1eoqiLL1slBxR6kKvx/pw2fzmb/5m/3r17FncRUm8Z8+ecV+FFGZfJer+Tb1er9dLRLJxy7JQqVQAAIZh+K2Ttm2PLKlWLpdRq9X8/46PjydOqkm8d98Fnjzpj30E+v9/8mTp+SHDsG17JNm7pmkwTTOwrVgsYm9gPCetn
7B1AWB9ICK6zGKdWBOH09NTbG5uJmpg7tnZGR48eIC7d++e65iPe/fu4b333ov0no2NDX/SklxekrkgL6fBugCA9eGSevXVV/G1r33t3K9X6+Ciru3rIMy+SuL9+7KLPcUPJYdpmkilUgD6Y1Zl/k7P89BqtaAoCjRNg+M4UBTFH4dq27a/6tDgbPs7d+5wNviKmlYXFEWBbdtj6wPrAhHR5cEgkgAAuq6jWq0GZrgPriCUz+f9fwsh/FRMrusilUpBVVXouu4HDrZt+0EIrZZpdUFVVbiuO7Y+sC4QEV0uiRgTSfEyDAOZTGYkRVKr1YKmaVBVFd1u1/9vcASETK1kWVbg/TLfF62WWXUBwMT6wLpARHS5MIgk1Go17O7ujmx3XXfmeuYysNjf3w+8ttVqjc3bScnGukBERGGxO/uSk7Nwh2/yjuNACBFq0oQQAo7jBNZAH35MyWBZFjzPQ7vdRj6fDxwj1oXLa1q9ICKahEEkjU3UXqlURtIrTeK6buAzHMcJTLrhDSkZHMcBABQKBQghcOvWLXS73cBrWBcunzD1gohoHHZnX3KapsHzvMA2mbpFToyYRVGUwLrl+/v7fmuW67rLKSgtzPM8NJtNAP1jlkql/AACYF24rGbVCyKiSdgSSWi32zAMAzdu3ADQv5FESeCuqiq2t7f9JSt3d3dRqVRgmmbo4IPOn6ZpgZZAz/NGuq5ZFy6fMPWCiGgcBpEEVVVRrVYX+ozh7s6LXkVICIGDgwMA/XXYXdfF3t5eoFVsnFqt5v/7+Ph4ZD8s+nxU8u9oNBp+69Dw98m/SQiBUqk01/dMWmlmHepCVPIYdjodAKPln/SeWcfBMAyk02kAQCqVWigpe9z1gohoHAaRtBYMw4BhGP74u2KxiHw+P/aGK+Xz+UA+Q9M0YRiGH0Qt+nxUjuOg1Wr5ybqHyWBnMP9isVj0gx65DOiwGzduBIIKy7Kg6/pKrDQzPMZy2YaPV7FYhK7rU+vNrOMgk6s/evQIiqLAcRxks1nMuzgY6wURJVaM63bHIokLuIdZeH4ZXn311XP9/DhpmtarVqv+42q12lMUZeLrO51OD0Cv2+3627rdrr9t0ecX0Wg0eplMZmS7oigjnx31FG42m71ms9nr9Xq9drvd63Q6c5fzIhQKhXP77G6329M0LbBP2+12D8DU/TLrOBQKhUBd7PV6/j5fRFLqxW/+5m9eyPVqHVzUtX0dhNlXSbx/X3ZsiaS1MNxydHh4OHUmsJzkMdjdLf/darVGtkV9ftmzkF3XhRBibPd82FnPk1aaucxarZafJB14PjtdCDH29WGOg2ma/pAK13VHxhwuE+sFEcWJs7Np7ViWBSHE1LFd04KFwS7UeZ9ftkmfqSjKxIBn2LSVhy4jRVHQ7XYDk0hkrsxJXeizjoN8XubWVFUVxWLR/9xlY70gojixJZJiNWsVFCmbzc6c3SsnHwghkM/np06qUVUVmqbBtm1/DNjgjX7R5y9KKpUaO06O5iNzYs6akDVMHofBFm4ZnFar1QvPvch6QUQXgUHkJbexsXEh3zOpdSNsEuswFEUJTHLZ2trC0dHRxICg2WzCMAx4nodUKuW3Psn/L/r8IMuyYBiGPwN4WZYZKMRdF8b9oJCTRAZNmvxhWRb29/dnfn+5XB6bwsYwDOzu7s6Vimj4OGxvb/v/lq2Ck7qXz6NuMIAkoovAIPKSW4euKyEEKpUKyuWyHzBqmubfuKfNNh2cmSu7/wYDgEWfl1RVXSj33qTuVdllugxx14VxPygGZxnPksvl5p5ZbFkW0un0zABy1nGY9LyiKBO7nhepGxdRL4iIJmEQSbFaRne267qo1WooFouBXHkApnZLOo4zMh4ul8v571n0+UGZTGahfImqqvqByHBwwKUEFyOHIcj6JVPpjAvCwhwHVVUDk3XkZ477cQEsVjdYL4goTgwiaWksy4LneWi328jn86FuYsvozs5kMiiVSoGbqFxuT5bBdV1YlhXIi5fP51Gv1/3X1Ov1QHkWfX5ek7oiy+UybNv2gx3LshK9Csw89eGiOY4Dx3GQy+X8lsLB/Tqu3sw6DtVqNbDco2VZ0DRt4VVg1qVeENH6YBBJAbILOJPJROoOk2vtFgoFCCEufCJBuVwOrB4jhMCjR4/8x7Zto16vB4KBer0Ox3Hgui46nQ7q9Xrgb170+ahkwLK/vw/HcWAYBm7fvu130ZZKJdRqNX8968PDw6WOKR02b10A4q8PYcik4EIIGIYReE7Wk3H1ZtZxyOVy8DzPr4/Hx8dTk5fPkrR6QUTkiy9FZTySmKw0KcnGm81mr9Fo9Lrdbq/ZbEZK9Dz8elVVe+12e+6yUrwWqQvy/cuoD+eZbJzmw2Tj4THZeHhMNr6amCeSAPRbZZrNpj+mT9M06Lo+0kIziaZpgdYPz/MW7r6jeCxaF4Dl1Yco30lERBeLQSQB6HfbDec5lKtvRFUsFqcm+qZkW2ZdABarD5xhTESUXAwiCUB/HFe73Q5sk/ntwq58AfQH9U/K40erYVl1AWB9ICJaZ5xYQz7TNJFKpQD0ux/lbFrP86Aoij9wXybZtm0bnU4HxWIRqqrCtm2/+9NxHCiKwpakFTWrLgBgfSAiuuQYRBKA/iog1Wo1MG5N5nBUVRWmaWJnZweKovhBQ6PRgK7r/kzcfD7vv1cIEXvyaprPrLoAgPWBiIgYRFJ/8kImkxmZ+NBqtfwWqFQq5bdAua7rBxWDqUuSlsKFogtTFwDWByIiYhBJAGq12sgYOKAfHJTLZQAIjGlrtVoLrb5CyRWmLgCsD0RExCDy0pOzcIdbnhzHgRBiZEKEXNVj3NJ+coycXDIuiSuU0GRR6wIwvT4IIfwZ3YPJuomIaD1wdjaNnexQqVTGrnph23YgOJSBh+u6fm7BQqGAarV6fgWmcxOlLgCT64P89/Hx8fILSUREicAg8pLTNG1kTV7Zoji4Fq+u6wD6Y97krN3BlC9yJq6kKMpIrkFKtjB1QW6bVR+Afpd3Op0+51ITEVFc2J1NaLfbMAwDN27cANAPAAfHuKmqCl3XYVkW9vb2YBiGH1zILs5Op+O/H+hPvIiaU5DiN6suAOHqAxERrT8GkQRVVad2Pw/P1p3UtTlsuFWLkm9WXQDmrw9ERLReEtWdLbvIZqnVajBNE6ZpolarnXOpztmzZ8Bf/EX/33/xF/3HK2i421JOriEiIqL1lJgg0rKsUGPoZNBYKBRQKBSQyWT8HHUrp1QCrl0DZOqUcrn/eAVnsmqahsPDQ/+x67qcnU1ERLTGEtGdLYQI3fVZqVRwdHTkP9Y0Dbqur16XWqkE/MmfjG7/+OPn21eolVVVVezu7sKyLHieF8gpSJeTbdtoNpsQQkBVVY6XJCJaM4kIIg8ODrCzszOzRdF1XQghxuakG041kmjPngF/+qfTX/Onfwr81/8K/PzPX0yZloBBAg3SNG11zkkiIoos9iAySvAnExsPUxRl4kzgp0+f4unTp/7j09NTAMDZ2RnOzs6iFXZZ/vIvA8Hh2UsvBf4feN3v/d7SvpZrFxPR
ouR1JLbr5wqR+4j7arYw+4r7MXliDyJlV9ci6WBSqdTE7vBKpYK33nprZPvDhw9x7dq1ub9zIZ/7HPDXfz2yufntb4++9sGDpX3t48ePl/ZZRHQ5PXnyBEBwnXSajvsqvGn7StY9So5Yg0jTNANJjOc1bTxluVzGG2+84T8+PT3FzZs38corr+D69esLf/dc/uIvnk+mQb8Fsvntb0P/nd/BlZ/97PnrKpWltkR+61vfWtpnEdHlJH9867qOK1euxFyaZDs7O0Oz2eS+CiHMvpI9iZQcsQWRjuNge3s70nsmpYyRrZnjXL16FVevXh3ZfuXKlfhO6t/9XeAP/7A/iWawTD/72fMg8sUX+69bYhk3NjaW9llEdDnJ60is19AVw30V3rR9xX2YPLEFkZ7nwXEcP61Pp9MB0E/hM2kmp6qqUBQFruuOBI0rNYD/538eeOON8bOzpTfeWPqkms3NTdy7d2+pn3nRer0eHj9+jJdffplB8QzcV+FxX4UXWw8OESVObEHk8MxNx3FgmiZKAzkSXdeFZVmBbeVyGbZtB9Z1XkaX+IWT6XuGZ2m/+GI/gDyH9D5vv/320j/zop2dneHBgwe4e/cuf5XOwH0VHvdVeHJfERElItm4ZVmoVCoAAMMw/NZJ27ZH8j+WSiUIIWBZFizLwuHh4erliJRqNeDJk/7YR6D//ydPVio/JBEREV1Osc/OBvr5Bcd1X8tVaYYNtkyufG7Cn//5/uSZBw/6/2crCBEREa2ARLREEhEREdFqYRBJRERERJExiCQiIiKiyBhEEhEREVFkDCKJiIiIKDIGkUREREQUGYNIIiIiIoqMQSQRERERRcYgkoiIiIgiYxBJRERERJExiCQiIiKiyBhEEhEREVFkDCKJiIiIKDIGkUREREQUGYNIIiIiIoqMQSQRERERRRY5iDw9PcX3vvc9nJ6ejn3+e9/73sKFIiIiIqJkixRE7u7uYmtrC5qmYWtrC3/8x38ceP7k5AS6ri+1gERERESUPKGDyDfffBPtdhsPHz5Et9vFd7/7XbRarZFAstfrLb2QRERERJQsoYPId955B6Zp4s6dO9jc3ISmaXj48CHef/99lMtl/3UbGxvnUlAiIiIiSo7QQeTx8TG2t7dHth8cHKDT6eB//I//sdSCEREREVFyhQ4iNU3DwcHB2OcODg7w3e9+F3t7e0srGBERERElV+ggcm9vDw8fPsQXv/hFfPDBByPPHxwc4P/8n/+zzLIRERERUUL9XNgXbm5u4uDgAEdHR/jc5z439jWNRgNHR0fLKhsRERERJVTkPJG3bt1a6HkiIiIiWn1csYaIiIiIImMQSURERESRMYgkIiIiosgYRBIRERFRZAwiiYiIiCgyBpFEREREFNncQeQPf/hDvPnmm/jiF7/ob/vGN76BH/7wh8soFxEREREl2FxB5N7eHu7cuYN0Oo1Wq+Vvv3XrFgzDCP05QgiYpgnTNGEYBvL5PIQQU99j2zby+TxM04Rt2zAMA5ZlzfNnEBEREdGc5goia7Ua2u02Xn/99cD2r3zlK4GgchbDMKBpGgqFAqrVKlKpFPL5/NT3CCFg2zaKxSKKxSLS6TRyudw8fwYRERERzWmuIPL4+Bg3btwY2X50dIRerxf6c1zXDbQiDrdsTiK/p9PpoFAohP4+IiIiIlqOjV6UqO9fFYtF/PM//zMODg5w69YtHB8f4/T0FLlcDplMBvfv35+rMLIVstFoTHyNZVnQNA2Kosz1Haenp9jc3MTJyQmuX78+12cs29nZGR48eIC7d+/iypUrsZXjq1/9Kk5OTmL7/jB6vR4eP36Ml19+GRsbG3EXJ9G4r8LjvgqP+yo87qvwwuyrTz75BP/0T/+EX/qlX8ILL1yeecGbm5t4++234y7GWD83z5vq9Try+bwfyN2+fRuO46BQKMwdQFqWBSHE1ABSOjg4QCqVgud56HQ6qFarE1/79OlTPH361H98enoKoB+4nZ2dzVXWZZPliLs8Qgj83d/9XaxlICIioufu3bsXdxEmmiuIBPqthUdHR3AcBwCQyWRw69atyJ8jhMDBwQGEEIHAdJJMJgMAUFUVAGCaJvL5/MTgs1Kp4K233hrZ/vDhQ1y7di1yec9Ts9mM9fsfP34c6/cTERHR6pirO/uFF17Azs4Odnd38eUvf3lphZGztI+OjkJ3VwshsLW1hW63O/Y941oib968iQ8//DBR3dnNZhO6rsfanf3aa6+xJZKIiChB7t27h/feey/uYow116CCVqsFRVHwn//zf8aLL76I3d1dfO9734v0GUIIGIYRSOmjaZo/+3qS4XQ+MnB0XXfs669evYrr168H/gOAK1euJOq/JJQp6pgdIQQsy5q474noYvBcJKI4zBVEZjIZfPOb34TneTg8PMTnPvc5FAoFvPjii/i93/u9UJ/hui5qtRo8z/O3yYByUiuk7PIevFDK98jubboYtm3Dtm1omgbXdVEsFuMuEtGlxHORiOKy8PSmTCaDarWKer2OO3fuoF6vh35fqVQKBH/7+/vIZDLQNA3A80BTUhRl5D2maSKXy809W5uiE0Kg2Wz6+13TNOi6HinRPBEtjuciEcVpoSDyO9/5DnZ3d/Hiiy9iZ2cH2Ww2UrLxcrmMWq3m/yeEwKNHj/znbdseCUqH33N8fBxqRjctj2z5GKRpGkzTjKlERJcTz0UiitNcE2t2dnbwzjvvYHNzEzs7OygWi/gP/+E/nEf5lo55IidbdPDuxsbGxAlORHRxeC4SrY8kT6yZK8VPKpXCw4cPcefOnWWXh1aEaZpIpVIAAM/z/CEInueh1WpBCAHP8/wVhYQQuHPnDtrtdmxlJlpH085FRVFg2zbPRyI6F3MFkd/85jeXXQ5aIbquo1qt+jk7AQQG86dSKaiqCl3X/ZuWbdv+jY6IlmPauaiqKlzX5flIROcmVBD5u7/7u8jn8/jCF74AoD8ucZpKpbJ4ySiRDMNAJpMJ3LSAftonOTtU0zTUarXAa2QeTCJajlnnIgCej0R0rkIFkYeHh4ELzrQuEK4Put5qtdrY4++6Lsrlsn/z2t/fDyxH2Wq1/LXRiWhxs85FADwfiehczTWxZpVxYs1kswbv2rYNXdcxXGUcx0E2m/W3y1WEBl+3sbEx8j4imk/YcxHg+Ui06pI8sWauFD+np6djt3/wwQf44IMPFikPJdy4pO6VSiWQisl13cDrHMfxH09bjYjoMhNC+KnLwghzLgI8H1eNZVkwTRPFYpHHZ82tw7Gea2LN1tYWPv7445HtnU4HtVoN3/3udxcuGCWPpmmBFYaA58tQygH7QD8p/GBqEZlEHpi8PCXRZWfbNo6Pj3Hjxo2Zrw17LgI8H1eJ4zgA+sdQCIFbt26h2+3GXCo6D+tyrOcKIid1g2xvb0dKNk6rp91uwzAM/0anKMpIsndVVbG9vY1arQZVVbG7u4tKpQLTNEducETUl8vl4Hmev5TrLGHORYDn4yrxPC+wAlEqlYLjOCOTp2j1rcuxjjQm8vOf/zw2NjZGukck13WRyWRweHi41EI
uE8dETpbkcRezTKqTRKvENE0IIVAqleIuCiXA1tbWSrZOUXTTjnWS782RWiLr9Tp6vR5eeeUV3L9/f+R5VVVXZuUaWi/pdPpSThQQQuDg4ACNRgPNZjPUe2q1mt+9eREBiywj0B/y4rou9vb2Zq6mMu1vy+fz2N3dhaqqI58jf0zMs2/mLQ9w8ft1Fc1bX4F+3QEwMuZz1n4fHGN6fHwcmKU+j4uqB8ViEXt7e4sUNTZyyWJd16GqKprNJm7fvo1cLjfxPWGvE4ZhIJ1OA+jnJB78TB7rixcpiJQr1ORyOXzlK185lwIRRTU4UeAycRwnsDpQGPIiO5h0ulgsjtyYl8kwDBiG4R+jYrGIfD4/NYiY9bc5juOPARyUy+XQaDTm2jfTzPq8Wfu1Vqvh+Ph45H03btxY6WAzSg/APMfEMIxAIFAsFqHrul93Zu33fD4fSLJumubIZ0ZxUfXAsizouj416FrEeffcCCFg2zYsy4KqqjAMY+bfMus6IVdZevToERRFGclEwGMdk94cbNvuvfPOOyPb33zzzd4PfvCDeT7ywpycnPQA9E5OTuIuiu/Zs2e9d999t/fs2bNYy/Hqq6/G+v3zqlarvUKhEHcxYtNoNHqZTCbUaxVF6XW73cC2OS8DoWma1qtWq/7jarXaUxQl1Hsn/W2DnyfV6/XQ75/XpM9b1n6t1+tj/7akmue8C3tMut1uT9O0wH5tt9s9AL1Op9Pr9abv906n0wMQeL7b7Y5sm8d51oNms9lrNpu9Xq//98q/dZnO+3rZaDQi7+NZ14lCoTBybsj9tO7HOsn35rkm1rz55ptjo/vt7W0YhsHZ2XQhbNuG4zioVCrY2dlBrVZDLpeDqqowTROdTge7u7v+DNTDw0MUi8XAL/DB7ggg+KtyeL1h27bRaDRQr9f92XRHR0eB98vJC0C/W1UOkrYsy/9+ueRcs9kc2wJomiaq1arffbcsrutCCDG2G9m2bT8x9bINtzgeHh4u/F3Dv9pt28b29vZCnzmvZe1X27bRbDYhhICqqqvbMrFErVbLH2sPBIcqzNrv0uDz8t+Dq/osyzLqgeu6gSTwQojAMJ11Xgd91nVCXtNd1/VXYhpcmQlYr2O9KuYKItvt9tgLtqZp2NnZWbhQRGHIi4jsspAntOu62N7eRiqVQj6fR7vd9tOcGIbhz2DVdR31et2/MWWzWWxvb/sz5YbXG240Gv5YHEVRoKoqPM/zvzebzaLRaPifJ5elkzPu5EXj0aNHcF13Yl4wTdNCz9CNYlI6F0VRzuX7xrEsC0KIsbOIoxj8ITB4U4nDsvbr4E2R+vtveKKBPGdUVZ2YCUTudxl4jrvZn0dqo2XUA1VVJ06uWLV10A8ODpBKpeB5HjqdTqRu5eHrhNy3cuiSqqp+d7emaYEfF+twrFfJXEGkqqo4OjrCL//yLwe2e56HW7duLaVgRGHIi8rghUMGFPv7+376BKBfb+VNSAZ4g8HI9vY2bNtGJpOBpmkwTTNwUx9eKk5O7AD643e2t7f9x67r+qlXPM9DJpPxW0YVRfEfj6Oq6oWOk5MX+nGKxWKoz8hms1PTxcgB6kII5PP5mZNqoqhWq+c6pnNe0/YrzUcmU59Wf+R+V1UVmqbBtm2/VTeOhM7LqgertA76cMuxaZrI5/MzfzxOuk4MtjTKz65Wq35uxXU71qtkriCyUCjga1/7GizLwr//9/8eQH+1mp2dHa7HumYuai30cc34lmXBMIyp3bqtVmskr5YM/BzHgWEY/vbBCTjj1h12XRfpdNp/f71eD/x6dhwnEFQO3shM00SpVIJlWX6uPxkIyvfIgddJM+2it6zgTFGUwID3ra2tkaEA85AJexcVpq5FteybSZznIjD+B8W4On1ekwQMw8Du7u7M3JaD+73ZbMIwDHie57fiAeNX+wGSXQ/mXQd90eNmWRb29/dnlq9cLo8Ej9LOzg6KxeLELmBp0nVCGuwBla1+svt4nY71KpkriCyVSuh0Orh16xa2trYA9H9BvP7666hUKkstIMUrzjEag2MKJ5n2K7zVagUuOs1mE9vb23AcJ/CLVrJte2LQOC4JrOxCkr+Sp3XXyDFccSaSnXQxlWPwzoMQApVKBeVy2b95yO76wVaDedXrdX+IwSLC1LVp7x1n2fs17vFS435QnPfMfsmyLKTT6UAAGXa/D56Xsqtx0vjZpNcDIcTIj9nhx8MWPW65XC7yeWpZVuA9gy2K4/bvrOvEpGOiKEqga3mdjvWqmCuIBPoVs1ar+U3GmUyGXdm0VJlMZmb3h+M4KJfLAIIXLjl+aPBX78HBAdrttv/cIMuykMlk/IvKcNqgVqs1Mg5vcBzOrAvHeQzujkp2+49L7zGpbIt2Z7uui1qthmKxGMinBmDhVkigH/hns9mFPydMXZtknv1K4cl7zOBEEtldPWu/D//4kz9cJtW9pNeDWeugJ6G+ya7oTqcTuEYCk4OvWdcJOQ5yOAgVQvhB4rod61Xxwrxv/OEPf4j79+/DNE185Stfwa1bt/CNb3wDP/zhD5dYPEoaIQQsy0rMmruTftkOj4ep1WooFAr+2Jnhbod6vR64oAxfGIa7vgcvWJlMZuTzhBAwTdN/3Gw2Q//qlRfUsCZ1oYz7nHK5HNg3lmVN7R6s1+uh/pv0GZlMBqVSKbA/5drNgzMrJ/29s7qHZuW7W3b30qTPi7pflyFp52JYUeqr4ziBiWmu68I0Tf9H4Kz9ns/nA88PD1FZ9t9w3vVAThCUkrgOuqIoI+e8aZqBgG74WIe5TlSr1UC3umVZ0DTN//vX7VivikjLHkp7e3t48803cf/+fbz55pt+Qs133nkHpmkmOsUPlz2cbNbSSjK9hKZpaLVafrqbOA2m1BnsPikWi0in0/5zgykxgOfpgeSv20KhMPKLVV7k5K9g2XWqKEpgRiDQv9nJbhc57mfwF2k+nw+MGZom7Fgd13X98UqO46BUKgVWhZiUKmhwnx0eHi7lQjvNcEAtZ2rK/T2unLP+NimdTqPRaIzs17DvDyvM513kfk3KuRilWzRqfZVptMbNdh28bU3b7/I8VxQFnU5nJMVXVEmoB4PXNlVVUalUArO1w37GedaX4XN+ePWYcef8rOuEfJ+sD8OfuY7HWkrysodzZRn+/Oc/3zs6Our1er3e1tZW4LlUKjVvzsoLwWTjk01LaNrtdnulUimwrdFojGxLClVVzyVJL1HcknQuXuYk/6uMx221JDnZ+Fzd2cfHx376ksEZg0dHR7EP/qbzYdv2SBexTIOTROe9rBdRXJJ0Lg5mP6DVweNGyzJXEJnP55HP53F6eupvOz09RbFYvJRjAi6DXC43Mi5Qpli4qETVYU2bzUe06pJ0LvKH2mricaNlmWt2dr1eDyQCvX37NhzHQaFQwP3795dZPkqQwQHtnuf5Y/7kqi2WZQF4npvNtu2ljE2JwnEcVKtVf3wNf9TQOlqFc5GI1t/cKX4ajQZc18UPfvADAEzxs+50XUe1Wg208Mn0L3Kt6p2dHSiK4t+4Go0GdF0fSZdznjKZzMgarE
TrZFXORSJaf3MHkcDzWau03uQSgcNdxIO5DwdzMrqu69/UBgM62ToyuCQZEYXHc5GIEiXU7JsXXujt7e35jzc2NnovvPDCzP+2t7d7P/jBD85pTtB8ODt7skkzwAD02u32yHZFUXqNRmPs9m63G9jW6XQCMwI1TVussESXEM9FossnybOzQ7VE3r9/f2T5uDAODg6Qz+fxT//0T3OEt5QEgysSDXIcB0KIkbx7MuHtcM5F27YD2xRFScwKC0SrgOciESVNqCDyj/7ojwKP79y5E+rDt7e3/bW1aXWNG7JQqVTGJqsdvhnJx51Ox08LBfS73JI2q5so6XguElGSzL3s4QcffIByuYzd3V3s7u7iv/23/xZI+QP0x+nMu0IEJcO4JQLleCo589myLOi6DqDfSi1njc5KObLsJemI1hnPRSJKmrkm1rzzzjvI5/OBAdl/+Zd/iVKpBMdx8Mu//MsA+i2WYVstKbna7TYMw/BbLxRFGVlnWtd1WJaFvb09GIbh39zkj4h0Oh24ickB/UQUHs9FIkqSudbO/vznP49cLjeSE7JYLMJxHBweHi6tgMvGtbMnO8/1OV3X9VONAEA2mx1JmExE54/nItFqSfLa2XO1RHqehz/+4z8e2V6tVv3ukzCEEDg4OADQX2zddV3s7e2NDAQfVqvV/NcIIVAqlUJ/58r5+GPgH/8R+OlPgV/8ReDXfg148cW4SxWZqqrY3d2FZVnwPA/lcjnuIhFdSjwXiWhZ5goid3Z2cHR05HdbSx988EGkMZCGYcAwDL8rpVgsIp/PT539XavVADwfA2TbNorF4tiB5SvvO98Bfv/3gX/5l+fbPvtZ4M//HHjttfjKNSeOjyVKBp6LRLQMoYLIcb9Uv/CFL4wsKSdXSgjLdV1YluW3JKbTab9lcpJKpYKjoyP/saZp0HV9/YLI73wHyOWA4dEGP/5xf7tlrWQgSUREROshVBA5brzMuHE02Ww20pcPtzgeHh5OzVXmui6EEGO7u9cqz9nHH/dbIMcNV+31gI0N4A/+APjSl1aya5uIiIjC+fGPf4x79+7FXQxsbm7i7bffDmwLFUQ+fPjwXAo0yLIsCCECMw2HyeS5wxRFmZi+4unTp3j69Kn/WKYhOjs7w9nZ2fwFXiJZDr883/8+cHwMvPTS5Dd9+CHwD/8A/OqvLq0cc8yxIiIionP0mc98JhETa8YFsgutnb0McnKNEAL5fH7mpJpxUqnUxDxnlUoFb7311sj2hw8f4tq1a5G/6zwFWmb/+q9nv+H0FHjwYGnf//jx46V9FhEREa23SEHk6ekpKpUKbNuG4zgAnuclu3///lwpcxRF8cdWmqaJra0tHB0dRQompyXKLZfLeOONNwJ/w82bN/HKK68kKsVPs9mEruv9FD/f/z7wG78x+41///dLbYk8ODjAq6++urTPW7af/OQn+MxnPhN3MYiIiC7M5uZm3EWYKHQQ+b3vfQ+5XA6pVAq5XA6FQgFCCHQ6Hfyv//W/UK/XYds2/tN/+k+hPk8IgUqlgnK57AeMmqZBCAHbtsfOHpyUEFcIMfG5q1ev4urVqyPbr1y5EmtOxnH8Mv36rwM3bvQn0YzrYt7Y6M/S/vVfX+qYyL/6q79a2medh6i5smRdymQyTKZMRES0ZKGWPTw6OkIul0O1WsX777+P+/fv4/XXX8cf/dEf4Zvf/CY8z8PXv/51aJqGf/7nfw71xa7rolarBVoR5bjGSa2QqqpCUZSxYyPXZlIN0A8M//zP+//e2Ag+Jx//2Z9xUs0Utm37k61c10WxWIy7SERERGslVBD55ptvolAo4PXXX5/4mmq1iq997WuhE39nMhmUSqVAC9H+/j4ymYwfEMpAc1C5XIZt2/5jy7JGUg2thdde66fxGe6+/exnmd5nBiEEms0mcrkcFEXx00AZhhF30YiIiNZGqO5s27ZDLYtlGAZu374d+svL5XIgSBRC4NGjR4HvrdfrgcC0VCqhVqv568EeHh6uX45I6bXX+ml81mDFmoskWyEHaZqG119/HdVqNaZSERERrZdQa2enUik4joPPfe5zU193dHSE7e1tHB8fL6t8S8e1s1fXouuHbmxsoNvtzpUBgIiI6DIbdw8O1RKpaRreeecd/OEf/uHU15mmiTt37sxfQqIlMU3TX8fd8zx/iITneVAUBbZtQwgBz/P84RBCCNy5cydUqzsREdFlFyqIvH//Pra3t6GqKr785S+Pfc2f/MmfoFarodPpLLWARFHpuo5qtYpMJuNvkxNrVFWF67pIpVJ+eqrBddhl4ElERETThQoiVVXFwcEBXnnlFWSzWWiahtu3b8PzPHQ6HViWBdd1cXBwMLPLm+g8GYaBTCYTCCABoNVqBSZsaZqGWq0WeJ3M1UlERESzhc4TqWkaPM+DYRhoNBr+BAVVVaFpGlqtVqITYtLlUKvVxnZHu66LcrkM4Hk6qP39/cBEm1arhXw+fzEFJSIiWnGRVqxRFGV9Z0LTypMzsodbIR3HgRAikMBeCAHHcQL5RYcfExER0WSxr51NtEzjVqapVCojP35c1w281nEc/7FMUk7rQQgB0zQBIHQeWyKKzrIseJ6HdruNfD7P6+iKWOQaySCS1oYccjFI5hMdTkivKEog1Y9MdA9g7IpItLps28bx8TFu3LgRd1GI1pbjOADgL4l869YtdLvdmEtFYSxyjWQQSWul3W7DMAz/ZFAUBY1GY+R1qqpie3sbtVoNqqpid3cXlUoFpmmu5wpIl1gul4Pnef6yqkS0fJ7nBVYKk/mlh4cXUfIsco1kEElrRVXV0KvSDHdxjws2ieY1PGSCaJ1pmhbovvY8jwHkJcAgkmhFyWVBdV2HqqpoNpu4fft2YALRLLquo9lsBrYNLkV6fHwcCMrz+Tx2d3ehqurIyj/zBkxCCBwcHKDRaIyURZZHfpcQYmXGNabTaYRYEGztzDqe4yT9GM97rs3aF2HqPgA///KiE1sv6lwrFovY29tbpKhLMe/1yjRNCCGgKAo6nQ7K5XLg/dOukYPPr9pxmweDSKIVJYSAbduwLAuqqsIwjEgBpGVZI2uM5/P5QAJ20zRhGIZ/kXQcxx9nOiiXy83Vkus4Dlqtlr960DB5MR5MCF8sFv2Lcq1WG7vM6o0bN2INRAYnal0ms47nOLOO8SwX0eI7z7k2a1/Men7wvAP6gdm4H31hXdS5ZlkWdF2PdC2KIsrxnud6VavVUCgUAkHZ66+/7r9+1jVyVY/b3HqXzMnJSQ9A7+TkJO6i+J49e9Z79913e8+ePYu7KIn26quvxl2ERGk0Gr1utzvXe7vdbq9er/cGLwGdTqcHIPCZ3W43sK1arY58Vr1en6sMgxqNRi+TyYxsVxRl5G+c57JVr9fHlv28VKvVXqFQuLDvS5pJx3OcRY/xReznRc61Wfti3PPdbrenaVrgO9vtdg9Ar9PpzFWOWeVZxrnWbDZ7zWaz1+v1y7toWceJcrznuV5pmjZx26xr5Koet14v3DVy3D34hcXDUCJaNQcHB9jZ2Qlsk7PSB7tt5L9brRYAj
LQu2LaN7e3tcymj67p+l9Kw4RbUaWzbRrPZRLPZHGmVkK1LMr0F0G95yGazc5XZtm3UajVUKhUA/VYAuV9li4VsHbEsC4ZhjGQDqNVqME3T/29aWWWrgyz31tbWyOD4Wq3mf5+cQQsg8P2macKyLP+zxjFNE+l0eq79MsmyjvE6arVagbohW9/OY4LYMo6D67rI5/PI5/PY2tpCNpsNtBgu+1wLY57rlaIo0HXd38+DLZ9hrpGrdtzkayddI2dhdzbRCjs4OEAqlfKXIA0zqWhSHszBi93wRUleFAdvCq7r+ktInodJqZYURYl0QR4e8D/4+cteQ11+l+zSkvvRdV1sb28jlUohn8+j3W77aabkKmBAf4xqvV7393M2m8X29rY/23W4rI1Gww/sFEWBqqrwPM//3mw2i0aj4X+eXBZUzpqVN/5Hjx7Bdd2pNx5N05Z+I1zWMb4I85xr81IUZSQ9jjw259F1v4zjoKrqxJQ+53GuhS3TYBnCXK/29vaQzWaxtbWFUqmEdDrtdw3PukZqmrZyxw2YfI0Mg0Ek0YKmtd4MymazS00fJGc+youTaZrI5/MzxyYKIaCq6shFRi5hatu2/wt+WlBRrVZjWcFK3sgXdV5rqMvxkIM3Gfld+/v7fgoUoL/P5T6WAd7gzWZ7exu2bSOTyUDTNJimGbjYDy/VKScRAP16ub29HWhFkamv5MzZTqeD3d1dKIriP55EVdULG2e6rGO8LPOea8skF00Y1+p0XpJ+rkUR9nolf9g1m03UajVomoadnR3/R1qUaySw2sctDAaRtNI2NjYu5Ht6U2bZLiOQsiwL+/v7M19XLpdHbmjSzs4OisXixO4NADPzYDabTRiGAc/z/JaDcd812C06iewunRaYzGNZF8d511Cf9Xe1Wq2R1CbyuxzHgWEY/vbBCTjj1n13XRfpdNp/f71eD5R1eKnOweNumiZKpZK/isjgrE35nsHu8CSZdIzHlXXc3zBtYsdFnWvLZBgGdnd3J56763quLeN4A+GuV5JhGNB1HY1Gw2+pz2az/r4Ne42UnzXtuAHnc+wu9AdY5NGXK44Ta1YXJ9YENRqNkW0Aeu12e+zr2+124Dk5IHya4Yk1UqFQmDkIu91u93K53NTXSOMGjctB7MMA+IP3FzVuH8zaJ7P+rlwuN3Hw/vAg+EKh0CsUCr12u91TFGXk9cPHc7Bs7XZ7ZJ/JOjFp3w0Kc/znFXZizTKO8UVNrBk27Vwbfm/UiTXDz8+aDLKu59o48xzvMNerXq+/H8Z9fiaTGVsHer3J18gwx63XC3/s4jpug8bdg9kSSbSgOLqzhRDI5/PodDojA7cnjb3xPA+O4/jdL/KXr1y1J5fLjawwIbtthltbbNueOSA+k8ks1N0nu4THpfRY1jjMedZQn/V3OY6DcrkMoN/KIFtI5LiwwX15cHCAdrvtPzfIsixkMhn/eAynDWq1WiNjvgbrwqwxWK1WK/a1jS/iGC9qnnNtWeS5Kq8bMs3L8Peu67m2LGGuV7KM41qWB6/xYa6RYY8bsNixS8L5w9nZtFaEELAs60LXv67X66H+W+Z4SEVRUCqVAhcO0zQDFzPXdQNJcTVNQ6lU8v+TF8ZSqeQHOvl8PjDGZ7j7VFp2br5J3S/lcjlQHsuylr4fl72Guuu6Y1fqGB47JfPRyXFWw/ugXq8Hbi7D+3u463vw5pbJZEY+TwgRmBnbbDYjrSgyXJ+mmXQ8x33GeR/jRc1zrg2a1bU46XnHcQIToOQs+kUnoqzTuRbWpOvVuGuk4zgj48Xb7Xboa+S6HbdZNv616fPSOD09xebmJk5OTnD9+vW4iwMAODs7w4MHD3D37l1cuXIl7uIk1r179/Dee+9NfN62bQghoGkaWq0WGo1GLBM/LspwUDC8coJpmqhWq2PH2shxYZZloVQqQdd1f8C44zj+Sg3FYnHsxTedTqPRaCy8rJnrun5ZHMdBqVQaWQlEtpQCwOHh4dJnxRaLRaTTaaiqClVVUalUAjNIoxos7+DfMfg9QP+mMPgdct+rqgrXdQMJjwc/G4Bf1nq9jnQ6DUVRoGnaSEuPnJQjx+4Ntk7k8/nAuL9ZwozdmnU8J9XJRY5xlMTk85rnXJu1L6Y9L4TArVu3xs6wnfeWvS7n2jzHe9L1atxxE0KgUqngxo0b/iznwXNx2jVyXY+bNO4ezCAyARhEhjMtiJQn/uDJY1nWuZ5QRFGk02k0m81LuZLNebqIIJKSg8c7PuPuwezOprVg2/ZId6FMiUKUBBexPN9lNDjbndYfj3eyMIiktZDL5UbGiMmuiKQlLabLR3Yr0/IxML9ceLyThbOzaW0MDl72PM8f/yVX8JDLOck8X7ZtTx33R7QMjuOgWq364+qSNGmEiGgRDCJpLei6jmq1GmjtkbOPVVWFaZr+qgMyiGw0GtB1fSR1CtEyZTIZNJvNuItBRLR0DCJp5cnl4oa7Cwfz4A3m53Nd1w8wB2/usqVS5vNKSp46IiKiJGIQSStv3HJxQD9YlEmfB9MhyPQ/w69tNpv+rD+Z8oaIiIjG48QaWmlyRvZwK6RMGDu8nqpMajtuBZbBbYqijMz2JiIioucYRNLKGzeesVKpjM0lNry81uASgDdu3PC3p1IpzuomIiKagkEkrbRxy8XJsY1yFqxlWdB1HUB/DKScwT0r/c+s5cqIiIguM46JpJXXbrdhGIbfkqgoysiaw7quw7Is7O3twTAMP9CU3d3pdDoQUMrJNURERDQeg0haeaqqTl3acHjm9rhubk3TAishuK7LiTVERERTxB5E1mo1APAXQJ+1JqZt26jX69B1HaqqotlsjixGThfo44+Bf/xH4Kc/BX7xF4Ff+zXgxRfjLlVkqqpid3cXlmXB8zx/VjcRERGNF2sQaRhGoAWpWCxC1/WpiXmFELBtG5ZlQVVVGIbBADIu3/kO8Pu/D/zLvzzf9tnPAn/+58Brr8VXrjmxHhEREYUXWxAphPDTsMjUKsViEdlsFq7rTh2PdnR0NJKihS7Yd74D5HJArxfc/uMf97db1koGkkRERBROrLOzW62Wn7cPeJ6qhalVEu7jj/stkMMBJPB82x/8Qf91REREtJZia4lUFAXdbjewTebsmzUr9uDgAKlUCp7nodPpTJ1U8fTpUzx9+tR/fHp6CgA4OzvD2dnZvMVfKlmOpJRnpu9/Hzg+Bl56afJrPvwQ+Id/AH71V5f2tb1xQSsRERHFIvaJNYNkguhpXdVylq0MNE3TRD6fH1nGbvAz33rrrZHtDx8+xLVr1xYv9BJNGwuaOH/917Nfc3oKPHiwtK98/Pjx0j6LiIiIFrPRS0jzjszzVyqVIr1PCIGtrS10u92xwee4lsibN2/iww8/xPXr1xct9lKcnZ2h2WxC13VcuXIl7uLM9v3vA7/xG7Nf9/d/v9SWyN/+7d/GyckJnjx5gmvXrmFjY2Npn72Oer0e91VI3FfhcV+Fx30VXph99cknn+D999/HL/3SL+GFF7hWykXb3NzE22+/HdiWiCBSplWRK4zMeu3wLNqNjQ202+2R
9ZPHOT09xebmJk5OThIVRD548AB3795djSDy44+Bz32uP4lmXPXZ2OjP0j46Wnq6n5XbVzHivgqP+yo87qvwuK/CC7Ovknj/vuxiD+XlOEgZQAohApNtBgkhkM/nA8/LSThcXeQCvfhiP40P0A8YB8nHf/ZnK5kvkoiIiMKJNYh0HAeO4yCTycB1XbiuC9M0/bWNXdf1k5ED/ck4pVIpEDCapolcLseUPxfttdf6aXw+85ng9s9+lul9iIiILoFY80TeuXMHQojAcnMA/HGRcnWawXGS5XI5EFgeHx9PnFRD5+y114AvfWktVqwhIiKiaBKV4mdYoVAYGScpWyMpIV58EfiP/zHuUhAREdEFi31MJBERERGtHgaRRERERBQZg0giIiIiioxBJBERERFFxiCSiIiIiCJjEElEREREkTGIJCIiIqLIGEQSERERUWQMIomIiIgoMgaRRERERBQZg0giIiIiioxBJBERERFFxiCSiIiIiCJjEElEREREkTGIJCIiIqLIGEQSERERUWQMIomIiIgoMgaRRERERBQZg0giIiIiioxBJBERERFFxiCSiIiIiCJjEElEREREkTGIJCIiIqLIGEQSERERUWQMIomIiIgoMgaRRERERBQZg0giIiIiioxBJBERERFFxiCSiIiIiCJjEElEREREkTGIJCIiIqLIGEQSERERUWQ/F3cBarUaAKDT6QAA6vV6qPcoigIAEEKgVCqdW/mIiIiIaFSsQaRhGKhWq/7jYrEIXdfRbDYnvkcGnYVCAQBg2zaKxWKo4JOIiIiIliO27mwhBBzHgRDC31YsFmHbNlzXnfi+SqXiB5AAoGkaTNM8z6ISERER0ZBYWyJbrRZc10UmkwEAqKoKAIHAcpDruhBC+F3Zg2zbhqZp51VUIlrAV7/6VZycnMRdjIl6vR4eP36Mb33rW9jY2Ii7OInGfRUe99V0m5ubePvtt+MuBi0gtiBSURR0u93ANtu2ATwPJodNaqFUFGVi4Pn06VM8ffrUf3x6egoAODs7w9nZWdRinwtZjqSUJ8m4r8JL0r4SQuDv/u7v4i4GESXIq6++OnKdmna9SsK1jIJin1gzqFKpoF6vj21pnCaVSsHzvImf+dZbb41sf/jwIa5duzZPMc/NtLGgFMR9FV4S9tXjx4/jLgIRJczjx4/x4MGDwLZp16snT56cd5EoosQEkYZhYHd3NzDeMaxJASQAlMtlvPHGG/7j09NT3Lx5E6+88gquX78+V1mX7ezsDM1mE7qu48qVK3EXJ9G4r8JL0r761re+Fev3E1HyvPzyy7h79y6AcNcr2ZNIyZGIINKyLKTT6ZkB5KRubiHExOeuXr2Kq1evjmy/cuVK7DfWYUksU1JxX4WXhH01z3gwIQRs20Ymk5l4fhPR6trY2Bi5Nk27XsV9HaNRsScbl+MgZQAphJg49lFVVSiKMvZ5TqohWh+2bfuT5VzXRbFYjLtIREQ0JNYg0nEcOI6DTCYD13Xhui5M00QqlQLQn0gj80JK5XLZDzyBfivmPF3gRJRMQgg0m03kcjkoigJN06DrOgzDiLtoREQ0INY8kXfu3IFhGEin0/5/hmH4E2ts2x5JIl4qlSCEgGVZsCwLh4eHTDROtEZkK+Qg5oMlIkqeRKX4GVYoFMa2Mg4uc5jL5ZZeNiKKTy6XGzmvZRqvSXliiYjo4iViYg0R0aDBYS2e5/ljnj3PQ6vVghACnucFxlLfuXMH7XY7tjITEV02DCKJKFF0XUe1WvVXsgIQmFiTSqWgqip0XfeDSNu2/aCTiIguRuyzs4mIJMMwkMlkAgEk0F8iVc7UzmQysCwr8BqZX46IiC4Og0giSoxarYbd3d2R7TLNj+zW3t/fD7ROtlqtkcCTiIjOF4NIIkoEOSN7OBh0HAdCCH+yjRACjuMEcsMOPyYiovPHIJKIEmPcyjSVSiWQxst13cDrHMfxHw+nBqLVJoRArVYbyRdMycbjdnkwiCSiRNA0DZ7nBbZZlgUAgVRfiqIE0vzs7+/7rZeTVrui1WTbNo6Pj+MuBkXE43Z5cHY2ESVGu92GYRi4ceMGgH7A2Gg0Aq9RVRXb29uo1WpQVRW7u7uoVCowTZOrV62ZXC4Hz/MghIi7KBQBj9vlwSCSiBJDVVVUq9WZrxtepWo40CQiovPHIJKIKAZCCBwcHKDRaKDZbIZ6j2ma/qo9nU4H5XJ54go+uq4HPlcuI6vrOlRVRbPZxO3btxda9WvW31Cr1fzyCSECq42tonw+j93dXaiqOrLfx43nlcIcN7kEMNDPhTrpuAwf13lctuNG54dBJBHRBXMcJ7DyThi1Wg2FQiFwc3/99dfHtsJaljUyyUgIAdu2YVkWVFWFYRgLBZCz/gY5qWIwIXyxWPRbkWu12thxczdu3LjQoGV4otY0juP443QH5XK5ia3hs46bXG3p0aNHUBQFjuMgm82i1+uNfNa44xrVuhw3SgYGkUREF0wmVB8XkEzSbDYDN2m5nviwaYHp0dHR0tYen/U3VCoVHB0d+Y81TYOu634wkpSAo1qtjgyPmKRYLI6Ue9ZY3FnHzTAM7O7u+sclk8mMbR2M8oNjmnU5bpQMnJ1NRLQCFEWBrut+ADKpBe3g4AA7OzsXXLog13X97tthUVrSbNtGs9lEs9kcCXpkq6ppmv42IQSy2ezc5Z5luOXWtm1sb29Pfc+s42aaJnK5HFzX9ffNuJynF3FcL+K40XphSyQR0QrY29tDNpvF1tYWSqUS0un0SAuabdtTk64fHBwglUrB8zx0Op1Qk5jmMSnV0qTW00k0TRv797iuG8sa6oPBn+u6cF13ZpL7acdN7ieZ61RVVRSLReTz+cDnzjquy3Lex43WD4NIIrq0BpdOnCabzcaePkhRFBiGgWaziVqtBk3TsLOzE2g1EkJAVdWxN3yZS1MGQqZpIp/PX+jMdhnALkoGb7VaLbY11MN2g087bjJoUxTF/zuq1Spu3bqFbrfrf8a043oRlnXcaP0wiCSiWGxsbFzI94yboCCFHQs3jWVZ2N/fn/m6crm80PrehmFA13U0Gg24rot8Po9sNotOpwNg9ti84a7vnZ0dFIvFid2XlmXBMAz/85dhWYHI4Brqg62prVYL+Xx+4vvG/WiQE0cG6bo+ddKR4zihyzrruAEIdInLVj/Z+hg1/2mSjxutHwaRRBSLacHdKsnlcgvNcg5DjlWTwZOqqmi328hms/5s61lj8yzLCpRTBo6u644NblVVnTvonTTbWbaoLcM8a6iP+9EwOPM4rHq97qfjmWbWcZu0f2UrpeM4M4/rsKQfN1ovDCKJ6NJale5s13XHthbK8nueB8dx/MkPshVKruqjaRry+Tw6nY4fDMiu0UnBQSaTmburW+ZRHDf5Z1lj5WatoX6eY/Js2w41gWfWcZPjIIcDeSEEtre3Zx7XcT9ekn7caL0wiCSiRJBdeJlM5sJaPZbRnb2ISd2EruvCsiw/nYqmaahWqyNdz+122/8bhlvkTNMMpGMplUpjZwUvmvJn0t9QLpdh27YffFuWtdRAPM4
11CfNjJ/nuFWr1UDZLcuCpmn+41nHdV5xHTdaLwwiiSh2tm373X6tVitS7r5VJION/f19OI4DwzACq8fI1WUGg4VGo4FKpYIbN2744+bGza4eHKMpx+NpmoZyuewnkgaA4+PjhSbVzPobSqUSarWan+Ll8PBwqcc0zjXUVVUdOwt8nuMm15mWx+b4+HhsnshJxzWquI8brZeN3roMTArp9PQUm5ubODk5wfXr1+MuDgDg7OwMDx48wN27d3HlypW4i5No3FfhJWlf3bt3D++9997Y54QQqFQqgRurZVk4PDw8txQ0RNI8YyJpOQavC2GuV0m8f192TDZORLGybXskkbGclUp03gzDiLsIRCuLQSQRxSqXy6Hdbge2yW6/uPLi0eXBWcdE8+OYSCKKnWma/hgzz/P8sV6e5/kTEuQYrWazCcMwYNs2Op0OisUiAwEiohgwiCSiWOm6jmq1GkhxMpgCBegHmXKVDxlENhoN6LoeSO1CREQXh0EkEcXGMAxkMpmR5MitVisw8zSVSgWSY8sgc3AWq2yp9DzPz41IRETnh0EkEcWmVquNjIcE+oFiuVz2Hw8mVW61WiOpaVzXRbPZ9GfZzpv+hIiIwuPEGiKKhZyRPdwK6TgOhBBjV+OQSaSHE2Tbth3YpijKyIxvIiJaLgaRRBSbcWMZK5XKxLx9w8vZDS4Hd+PGDX97KpXizG4ionPGIJKIYqFp2sjSa3Jc4+CKI5ZlQdd1AP0xkHIW96wUQJOWdSMiouXgmEgiik273YZhGH4roqIoI+MdVVWFruuwLAt7e3swDMMPNmWXdzqdDgSUcnINERGdHwaRRBQbVVVnLm04PHt7XFe3pmmBlUdc1+XEGiKicxZrECmEwMHBARqNxtgF54fJxe11XYeqqmg2m4GF44kopJMT4Dd+A/i//xf4d/8O+Pu/BzY34y7V3FRVxe7uLizLgud5gZndRER0PmILIh3HQavVghAi9NglIQRs24ZlWVBVFYZhMIAkiurznwc6neePf/QjQFGAdBp4//3YirUoXguIiC5WbEGk7KKSY5vCOjo6GknvQUQhDQeQgzqd/vMrHEgSEdHF4exsosvi5GRyACl1Ov3XERERzbByE2sODg6QSqXgeR46nc7MQflPnz7F06dP/cenp6cAgLOzM5ydnZ1rWcOS5UhKeZKM+yq8kX315S8DL700+41f/jLw3e8utSy9Xm+pn0dEq6/X641cp6Zd23ndT56VCiLlDE2ZusM0TeTz+ZGUIIMqlQreeuutke0PHz7EtWvXzqegcwozuYj6uK/C8/fV7/9+/78wHjxYahkeP3681M8jotX3+PFjPBi61ky7tj958uS8i0QRbfRibiKwLAuVSmXs+rmzCCGwtbWFbrc7cZzkuJbImzdv4sMPP8T169fnLfZSnZ2dodlsQtd1XLlyJe7iJBr3VXgj++qLXwT+9/+e/cZf+ZWlt0T+9m//tt8LkES9Xg9PnjzBtWvXsLGxEXdxEo37Kjzuq+muX7+O//k//yeAcNf209NT/MIv/AJOTk4Sc/++7FaqJdKyrMAMTBk4uq47sv6udPXqVVy9enVk+5UrVxIXhCSxTEnFfRWev6/+5m/6s7Bn+Zu/AZa8b//qr/5qqZ+3bGdnZ3jw4AHu3r3LejUD91V43FfRTbu2cx8mz8pMrBFCIJ/Pw3XdwDZg/Pq7RDRkc7OfxmeadHql80USEdHFiT2InJQj0nVd1Go1/7GiKCiVSoGA0TRN5HI5pvwhCuv99ycHkiueJ5KIiC5WbN3ZruvCsizs7+/DcRwYhhFYfUauTlMqlfz3lMvlQGB5fHw8dVINEY3x/vtrt2INERFdvNiCSFVVUSqVAkHioEKhgEKhENgmWyOJaEGbm8D3vx93KYiIaIXF3p1NRERERKuHQSQRERERRcYgkoiIiIgiYxBJRERERJExiCQiIiKiyBhEEhEREVFkDCKJiIiIKDIGkUREREQUGYNIIiIiIoqMQSQRERERRcYgkoiIiIgiYxBJRERERJExiCQiIiKiyBhEEhEREVFkDCKJiIiIKDIGkUREREQUGYNIIiIiIoqMQSQRERERRfZzcRfgovV6PQDA6elpzCV57uzsDE+ePMHp6SmuXLkSd3ESjfsqPO6r8LivwuO+Co/7Krww+0ret+V9nOJ36YLIjz76CABw8+bNmEtCREREUX300UfY3NyMuxgEYKN3yUL6Tz75BD/5yU/wqU99ChsbG3EXB0D/19XNmzfxox/9CNevX4+7OInGfRUe91V43FfhcV+Fx30VXph91ev18NFHH+HTn/40XniBo/GS4NK1RL7wwgv47Gc/G3cxxrp+/TovNCFxX4XHfRUe91V43FfhcV+FN2tfsQUyWRjKExEREVFkDCKJiIiIKDIGkQlw9epV/Jf/8l9w9erVuIuSeNxX4XFfhcd9FR73VXjcV+FxX62mSzexhoiIiIgWx5ZIIiIiIoqMQSQRERERRcYgkoiIiIgiYxBJRERERJFdumTjSVOr1aAoCgBACIFSqRRvgRJKCIGDgwM0Gg00m824i5N4tVoNANDpdAAA9Xo9zuIklqxXQH9fua6Lvb09/5ykyXRd57k4gW3bqNfr0HUdqqqi2Wzi9u3byOVycRctsQzDQDqdBgCkUinuqxXBIDJG8kZfKBQA9C88xWKRN/whjuOg1WpBCAHP8+IuTuIZhoFqteo/LhaLvOFPYBgGDMOAqqoA+vsqn89zX81gWRZs2467GIklhIBt27AsC6qqwjAMBkUTCCFw584dPHr0CIqiwHEcZLNZMHHMamB3dowqlYofQAKApmkwTTPGEiVTJpNBoVDwb/Q0mRACjuNACOFvKxaLsG0bruvGV7CEcl0XlmX5j9PpNFqtVowlSj7+mAvn6OgIvV4PnU4ncJ2nIMMwsLu767f+ZzIZ/ohbIQwiY+K6LoQQY7vN+AufFtFqtQIBowy+BwNL6ms2m4EhJIeHh9A0LcYSJd/BwQF2dnbiLgatCdM0kcvl4Lquf+/jObg62J0dk0mtQoqi8GZPc1MUBd1uN7BNXpjZkjudZVkQQqDRaMRdlMSybZs3+JAODg6QSqXgeR46nU5giAn1yfug4zhQVRWqqvpDSljPVgODyISRFx2iZalUKqjX65wsMoGcXCOEQD6f536aQggBVVX5Q3eGTCYD4PkPN9M0kc/n+QNliAwiFUXx91m1WsWtW7dGfgxTMrE7O2EYQNIyyfFGHJM1maIoKBQKfrf21tYWg6QxZLcjzSZb1aSdnR2/pZtGbW9v+/+WvXEc1rUaGETGZFLXovylT7Qoy7KQTqeZNmoCIQQMwwjc2DVN4w1sDMdxAjd6mm5wshYAv3Wbk9uCJt3rFEXhvloRDCJjoqrqxBOFY0FoUTIIki2QQghelIe4rotarRZo/ZcBJbu0gzzPg23bqNVqqNVqMAwDQD9N2XDAdNnJYRGD55usV2wgCJIttsPXJiEEf7SsCI6JjFG5XIZt2/6N3rIsdjtOwa7+cBzHgeM4/oxHgHVrnEwmg1KpFLix7+/vI5
PJ8IfcEE3TAvvEcRyYpslW7jEURRmpV3IoAH+cjKpWq/55B/SvVZqm+Y8p2TZ6zOgZq1qt5l9sDg8POYNvDJnLb39/H47joFQqcfWHCYQQuHXr1tixVzzVRwkhArlZ5Sxa3uwnk+eiZVkolUrQdZ1B95DhenV8fMxr+xSmafrXLO6r1cIgkoiIiIgi45hIIiIiIoqMQSQRERERRcYgkoiIiIgiYxBJRERERJExiCQiIiKiyBhEEhEREVFkDCKJaCGu6yKfz2NrawtbW1sjq3VIuq77K52MU6vVsLGxcW7lzGazKBaL5/b5RESXDYNIIpqbbdvIZrO4ffs22u022u02VFVFNpuNvP60pmmo1+vnVNL+ClH5fP7cPn+QZVnQdf1CvouIKC5c9pCI5iKEgK7raDQagdWDqtUq0uk08vk8jo6OQq/+kslklrLUmW3bKBaL6HQ6ge0XscKRYRgwTROpVOrcv4uIKG5siSSiuRiGgUwmMzY4KxQKSKVSqFQqMZQsPtVqFd1ud2q3PRHRumAQSURzsW176prJuVxupEtbCIFisYitrS2k02lYlhX4vOExkYOvHVyLGOiPoUyn09jY2PC7z/P5PHRdh+u62NjYwMbGhr8m7+CYzGKxONK17ThO4PunfTcRETGIJKI5ua6L27dvT3w+nU7DcZzAtoODAxSLRRwdHSGXy02chAPAf+7o6AjNZhOGYfifVywWsb+/j0ajgW63i2q1CiEEGo0GGo0GVFVFr9dDr9cb252ez+cDASwA1Ot1v1V12ncTEVEfx0QS0dw8z5v4nGwBHFQoFPxxj9VqFZZloV6vo1qtBl7nui4sy0K324WiKFAUBdVqFfv7+1BVFaZpotPpQFVVAJjaIjqOpmlQFAWWZfmB48HBAfb29qZ+9zLGbBIRrQsGkUQ0F1VVRyavDBoM8ibRNG1sS6Rs9bt161Zg+/b2NmzbhqIoMz97lp2dHezv7yOXy8FxHAghkMvl/BbKcd9NRETPsTubiOaiadpIl/Cgg4ODyC2EgzKZDLrdbuC/ZrM59+cNKxaLfvllMHlR301EtA4YRBLRXKrVKlzXRa1WG3nOMAwIIUa6qYfZtj12XGUmk/FbB8c9J4SYOJYyrEwmA0VRYNs2LMvyE5FP+24iInqOQSQRzUVRFDQaDRiGAcMw4LouXNdFsVhErVZDs9kcmdRimqYfoBWLRbiui0KhMPLZqqqiUCgEJt5YloVarTbynBAClmX5M69VVfW327Y9NdgsFAp+MCxbTad9NxERPccgkojmlsvl0Ol04LoustksstksPM9Dp9MZ6cpWVRU7OzuoVCrY2tpCq9VCu92emIy8Xq8jk8kgm81ia2sL9Xrd/0z5b13X/ed2d3cBPE9afuvWrZktobu7u7BteySQnfbd05imiY2NDT9A3tjYQDqdnvk+IqJVtNHr9XpxF4KIyLZt6LoOXpKIiFYDWyKJiIiIKDIGkUQUK9u2/UThzMNIRLQ6GEQSUazq9Tq2trZg2zb29vbiLg4REYXEMZFEREREFBlbIomIiIgoMgaRRERERBQZg0giIiIiioxBJBERERFFxiCSiIiIiCJjEElEREREkTGIJCIiIqLIGEQSERERUWQMIomIiIgosv8P5ZGq1VnumWgAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "epde_search_obj.visualize_solutions()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e34505a2", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "3ba9f78d", + "metadata": {}, + "outputs": [], + "source": [ + "systems = {'2' : epde_search_obj.get_equations_by_complexity(2)[0],\n", + " '4' : epde_search_obj.get_equations_by_complexity(4)[0]}" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "d444876c", + "metadata": {}, + "outputs": [], + "source": [ + "t, x = np.unique(grids[0]), np.unique(grids[1])" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "623edaca", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'u': array([[ 0.00000000e+00, 0.00000000e+00, 1.21893604e+00,\n", + " 1.99658704e+02],\n", + " [ 3.14429868e-02, 2.54933046e+00, 3.53417339e+00,\n", + " 1.84844299e+02],\n", + " [ 5.05541513e-02, 3.80805166e+00, 5.66891429e+00,\n", + " 1.70404493e+02],\n", + " ...,\n", + " [ 5.17249081e-01, -3.63689548e+00, -1.37043347e+01,\n", + " -1.09763264e+02],\n", + " [ 1.80894000e-01, -2.60133166e+00, -1.50330578e+01,\n", + " -1.25877305e+02],\n", + " [-0.00000000e+00, 0.00000000e+00, -1.65452194e+01,\n", + " -1.35648004e+02]])}" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "epde_search_obj.saved_derivaties" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "a1cb38e2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "bop 1: torch.Size([81])\n", + "bop 2: torch.Size([81])\n", + "bop 3: torch.Size([41])\n", + "bop 4: torch.Size([41])\n" + ] + } + ], + "source": [ + "from epde.interface.solver_integration import BoundaryConditions, BOPElement\n", + "\n", + "bnd_t = torch.cartesian_prod(torch.from_numpy(np.array([t[train_max + 1]], dtype=np.float64)),\n", + " torch.from_numpy(x)).float()\n", + "\n", + "bop_1 = BOPElement(axis = 0, key = 'u_t', term = [None], power = 1, var = 0)\n", + "bop_1.set_grid(bnd_t)\n", + "bop_1.values = torch.from_numpy(data_test[0, ...]).float()\n", + "print('bop 1:', bop_1.values.shape)\n", + "\n", + "t_der = epde_search_obj.saved_derivaties['u'][..., 0].reshape(grids_training[0].shape)\n", + "bop_2 = BOPElement(axis = 0, key = 'dudt', term = [0], power = 1, var = 0)\n", + "bop_2.set_grid(bnd_t)\n", + "bop_2.values = torch.from_numpy(t_der[-1, ...]).float()\n", + "print('bop 2:', bop_2.values.shape)\n", + "\n", + "bnd_x1 = torch.cartesian_prod(torch.from_numpy(t[train_max:]),\n", + " torch.from_numpy(np.array([x[0]], dtype=np.float64))).float()\n", + "bnd_x2 = torch.cartesian_prod(torch.from_numpy(t[train_max:]),\n", + " torch.from_numpy(np.array([x[-1]], dtype=np.float64))).float() \n", + "\n", + "bop_3 = BOPElement(axis = 1, key = 'u_x1', term = [None], power = 1, var = 0)\n", + "bop_3.set_grid(bnd_x1)\n", + "bop_3.values = torch.from_numpy(data_test[..., 0]).float()\n", + "print('bop 3:', bop_3.values.shape)\n", + "\n", + "bop_4 = BOPElement(axis = 1, key = 'u_x2', term = [None], power = 1, var = 0)\n", + "bop_4.set_grid(bnd_x2)\n", + "bop_4.values = torch.from_numpy(data_test[..., -1]).float()\n", + "print('bop 4:', bop_4.values.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "33f84f57", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + 
"\"0.039994341975063886 * d^2u/dx1^2{power: 1.0} + 0.0 * du/dx1{power: 1.0} + 0.0 * d^2u/dx1^2{power: 1.0} * du/dx1{power: 1.0} + 0.0 * u{power: 1.0} * du/dx1{power: 1.0} + -0.02059504177543492 = d^2u/dx0^2{power: 1.0}\\n{'terms_number': {'optimizable': False, 'value': 5}, 'max_factors_in_term': {'optimizable': False, 'value': {'factors_num': [1, 2], 'probas': [0.8, 0.2]}}, ('sparsity', 'u'): {'optimizable': True, 'value': 0.03388273808409926}}\"" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "systems['4'].text_form" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "79c0c2ad", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(41, 81)" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "grids_test[0].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "71cd6cda", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "target_form shape is torch.Size([3321, 1])\n" + ] + } + ], + "source": [ + "from epde.interface.solver_integration import SystemSolverInterface\n", + "\n", + "ss_interface = SystemSolverInterface(system_to_adapt=systems['4'])\n", + "form = ss_interface.form(grids = grids_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "80221cf3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict_keys(['d^2u/dx1^2{power: 1.0}', 'C', 'd^2u/dx0^2{power: 1.0}'])" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "form[0][1].keys()" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "90a8ce27", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'coeff': tensor([[-1.],\n", + " [-1.],\n", + " [-1.],\n", + " ...,\n", + " [-1.],\n", + " [-1.],\n", + " [-1.]]),\n", + " 'term': [0, 0],\n", + " 'pow': 1.0,\n", + " 'var': [0]}" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "form[0][1]['d^2u/dx0^2{power: 1.0}']" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "1b67cb73", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Using explicitly sent system of equations.\n", + "dimensionality is 2\n", + "grid.shape is (41, 81)\n", + "In grid format prepare: grid shape is torch.Size([2, 41, 81])\n", + "Shape of the grid for solver torch.Size([2, 41, 81])\n", + "target_form shape is torch.Size([41, 81])\n", + "In grid format prepare: grid shape is torch.Size([2, 41, 81])\n", + "Grid is torch.Size([2, 41, 81])\n", + "torch.Size([41, 81])\n", + "before Model_prepare: torch.float64\n", + "after Model_prepare: torch.float64\n", + "[2024-02-01 13:55:31.058434] initial (min) loss is 389.6265869140625\n", + "[2024-02-01 13:55:31.245747] Print every 5000 step\n", + "Step = 0 loss = 389.626587 normalized loss line= -0.000000x+1.000000. There was 1 stop dings already.\n", + "[2024-02-01 13:55:34.843923] No improvement in 100 steps\n", + "Step = 139 loss = 363.974487 normalized loss line= 0.000014x+0.985185. There was 1 stop dings already.\n", + "[2024-02-01 13:56:38.009521] Print every 5000 step\n", + "Step = 5000 loss = 254.840408 normalized loss line= -0.000070x+1.014118. 
There was 2 stop dings already.\n", + "[2024-02-01 13:57:43.115613] Print every 5000 step\n", + "Step = 10000 loss = 172.440872 normalized loss line= -0.000055x+1.010856. There was 2 stop dings already.\n", + "[2024-02-01 13:58:51.059042] Print every 5000 step\n", + "Step = 15000 loss = 148.885696 normalized loss line= -0.000014x+1.002834. There was 2 stop dings already.\n", + "[2024-02-01 13:59:58.290442] Print every 5000 step\n", + "Step = 20000 loss = 143.162811 normalized loss line= -0.000004x+1.000789. There was 2 stop dings already.\n" + ] + } + ], + "source": [ + "pred_u_v_4 = epde_search_obj.predict(system=systems['4'], mode='mat', \n", + " boundary_conditions=[bop_1(), bop_2(), bop_3(), bop_4()],\n", + " grid = grids_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "4c8d6d0a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(3321, 1)" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pred_u_v_4.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "042d649c", + "metadata": {}, + "outputs": [], + "source": [ + "pred_u_v_4 = epde_search_obj.predict(system=systems['4'], boundary_conditions=[bop_1(), bop_2(), bop_3(), bop_4()], \n", + " grid = grids_test, mode='autograd')" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "d3f52374", + "metadata": {}, + "outputs": [], + "source": [ + "pred_u_v_4_sh = pred_u_v_4.reshape(grids_test[0].shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "e6c5edcf", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "<base64-encoded PNG omitted: matplotlib figure>"
1oF+S5dNqyRTpreanOWr5YZy8v07rli7V0kTvLRwIA1iM8ORiX7eaX0VFT+2IDiu4fC0jR/YcUPdCn6P6+5ILtVOS5pMXFhSorKVRpSYFKiwtVWlyoleUlybB02rJFchdwGQ7A/ER4cjAWjDvf7p7DujOyR79/4kU99/KhaWt6eRYUyrt0obwnLZL3pIU6aZH7SEAaC0dlCwpVWlyghUUFyuNSGwBMifDkYMnLdhTJdJTDg8P6zY4XFdq+W/dFuya0Fea7dErFgmRAWrt07GfvSYtUsZC73ABgNhCeHCwx8/TQCz267W+dOn+VR+esKOWuJxsyTVMPdnYrtH23fr1jn/oGj84mXr52id5WtUrrV5ersrxEBfl5Fo4UAJyP8ORgqyrGihDu7Dqsm3/5hKSxhb7rli/W+as8unCVR+dXlun0ZYu5I2qO2tU9dlnup5HdeqH7cPL51UsWyF+1StdUraTYJADkmMs0j63MMruamprk8XgkSYZhKBAIZNwnk/ZwOKzm5mZVV1fL6/Wqra1NGzdulN/vT77GMAy1tLSotbVVbW1taR6xFIvFVFZWpt7eXpWWWrf1hGmaeqCzWw9Eu/XIbkOP7DLU1Td43OsWFOXr3BVluqCyTBdUerSqfIEWuQu0uLhAC90FWlCYzxqYHOrpG9T/PfXycZflFrkL9Kbzlsu/YZU2rC5nHzgAmGWpfn9ndeapqalJklRbWytpLLjU1dWpubn5hPtk2m4YhsLhsEKhkLxer7Zs2TIhOEUiEW3btk2GYai7u3sWfhes43K5dKl3iS71LpE0Fqb2GP16ZFevHt1t6OFdhh7b06u+wRE9uLNbD+6c/HhdLmlR0ViQWlRccDRYFY09Ll9QqBWeEi0vK9FKT4mWe4q1ZGGRJV/upmnq8OCI+uLDOhQfTv768OCI+gaHdTh+5OfBkbH2I22lJYWqLC/RqvIFqqxYoFXlJVrozs3E7F6jX+07u/VgZ7fad3brmZcOJdtcLumVa5fq2vUr9fpzXqEFRUwWA4DVsjrzVF5ers7OzuQskDT2hT7dR87UJ9P2UCgkn883oX0yoVBIDQ0N2r59e2oHO85cmXlKxcioqY79h/TwLkOP7ja0Y3evDhwa1MGBIR2KD2v0BP90FBXkaUVZsZaXlWiFp0QrPMVHAlaxli5yqzA/T/l5LhXkuVSQ71JB3tHH+flHfs4be37UNNXTN6gDhwbV1RdX16FBHTgUH3t8KK6uvrGfDxx5Pp5iraKZVCws0qryElWWj4WpVUdCVWX5Ai0rdWvRCdyVZpqmOvb3qX1nt9o7u/VAZ/ekZQROX7ZIb71wha6pWqWVnpJZOR4AwPQsn3mKRqMyDGPSkBIOh+Xz+dLu4/V6M2qf7DPnu/w8l844ebHOOHmxNm2onNBmmqYGhkZ1MD6kQwPD6ouPHP314LAODQzrYHxYXYcGtdfo197eAe0z+vXywbgGh0e1s+uwdnYdnuKTsyvPJS10j82OLXDnj/1clK9F7gItcBdoYVG+FhQVaKE7XyVF+erpG9Su7n7tNg5rV3e/evuH1N03qO6+QT26u3fKz1hcPLEWUvLXJRMf9xweVPvObm3b2XPcpdM8l3TuyjJtXFNx5Ee5llBsEgDmrKyGp8l4PB4ZhnFCfTJtT2hpaVFFRYW6u7vV0dGhxsbGqQ8kBfF4XPF4PPk4Fotl9H5zhcvlUknRWLhYtjj1foPDo3opNqA9Rr/29fZrrzEwFq6Mfu3rHVB336BGRk0Nj5pHfh7VyKipoZGpp7nyXFLFQreWLirSkkVFWrLQraWL3FqyqGjsuYWJX4/9XFKYn9Flw9jAkHZ392t3z2Ht6jny85HHu3v6k7Nyvf1DRza/Tb0IpbsgTxdWenTxqWNhqWp1uRbl6BIhACBzOf8XOxFaTqTPVJfaUm2XpKqqKkmS1+uVJAWDQdXU1Ki1tTWtMY3X0NCgm2+++YT7O01RQZ4qK8bWDqVrdJJQZZpSWUlhThetlxYX6uwVhTp7xeTTtkf3gBtO7gUXGzi6F9yxzxflu7R+dYUuPrVc564sozo3ANhYyuEpFApp69atM76uvr4+GVAmcyKLsGfqk057IjQlbNq0SXV1dVNe7ktFfX29Nm/enHwci8VUWVk5TQ9MJS/PpaJkSJq7AaO4MF/FhenNyAEAnCHl8OT3+yfclTaTY0NKgmEYU7bN1CfTdmksBI4/jkRgikaj04a+6bjdbrndrFEBAGA+yFopYq/XK4/HM+k6pKkWbs/UJ9N2wzBUU1MzoT2xFmqq4AUAADBeVvdxqK+vVzgcTj4OhULJ+kvS2GxPoi5Tqn0yafd4PAoEAhOCUjAYlN/vP+6Snd1rPAEAgOzISYXxRFhpb2+fcGdbMBhUY2OjOjo6Uu6TabthGAoGg8nHXV1dE9qj0WhyfVckElEgEDiuAvlM7FTnCQAAjEn1+zvr4Wk+IjwBAGA/qX5/s/06AABAGghPAAAAaSA8AQAApIHwBAAAkAbCEwAAQBoITwAAAGkgPAEAAKSB8AQAAJAGwhMAAEAaCqwegBMlirbHYjGLRwIAAFKV+N6eafMVwlMWHDx4UJJUWVlp8UgAAEC6Dh48qLKysinb2dsuC0ZHR7V3714tXrxYLpdr1t43FoupsrJSu3btmjd75nHMHLNTccwcs1PZ+ZhN09TBgwe1YsUK5eVNvbKJmacsyMvL06pVq7L2/qWlpbb7A5kpjnl+4JjnB455frDrMU8345TAgnEAAIA0EJ4AAADSQHiyEbfbrZtuuklut9vqoeQMxzw/cMzzA8c8P8yHY2bBOAAAQBqYeQIAAEgD4QkAACANhCcAAIA0UOfJJpqamuTxeCRJhmEoEAhYO6AsC4fDam5uVnV1tbxer9ra2rRx40b5/X6rhzZrDMNQS0uLWltb1dbWdly7E8/5dMfs5HPe1NQkSero6JAkNTc3H9futHM93TE78Vwn/mxLY8ccjUZ1yy23JM+r5LzzPNMxO/E8J5mY8xobG83Gxsbk47a2NrO2ttbCEWVfa2ur6fF4TEmm1+s1m5ubrR7SrNq+fbvZ3NxsNjY2mlVVVce1O/Gcz3TMTj3ngUBgwuPa2lrT5/MlHzvxXM90zE4817W1tWZHR8eEx04/zzMdsxPPcwLhyQY8Ho/Z09Mz4Tmn597W1tbjjtmJWltbJw0STj7nUx2zE895T0+P6fP5JhzX9u3bTUnJLx2nnetUjtmJ59rn800IR42NjabH40k+dtp5Ns2Zj9mJ5zmBNU9zXDQalWEYE6Z+E8LhcO4HhKzjnDvLtm3bFI1Gk4+9Xq+ksUseTj3X0x2zU7W1tU24DNfe3i6fzyfJuX+npztmp2PN0xw3/h+g8Twej6P/IZKklpYWVVRUqLu7Wx0dHWpsbLR6SDnBOXfOOfd4POrp6ZnwXOLL0uv1atu2bVP2s+u5numYE5x2rscLhUIyDEOtra2S5sff6WOPOcGp55nwZFOJP4xOVVVVJenoP7bBYFA1NTXH/cWcTzjnztDQ0KDm5uZJZyESnHaujz1mp57rxAJqwzBUU
1Mz7TmWnHGepztmp55niVIFtmX3v3Az8Xq9E/6XumnTpuT/bOYrzrn9bdmyRW9/+9tVW1s77eucdK4nO2annmuPx6Pa2trkpazy8vJpj8kJ53m6Y3bqeZYIT3Pe+D944xmGMWWbE4RCoQmPE/+bmWr620k452Ocds5DoZDWrl07YY2I08/1ZMeceH48u59rwzC0ZcuWCaHA5/PJMAyFw2FHnueZjlly3nkej/A0x3m9Xnk8nkn/sDl1YV5i+nf8MY//n4zTcc6PPic545wnvkwSsy+JxeJOPtdTHbMTz3U0GlVTU9OEmaTEMXk8Hkee55mO2YnneTzCkw3U19dPuCMjFArNOO1vZx6PR4FAYMJfsGAwKL/fP+MaAruZatreyed8smN28jmPRCKKRCKqqqpSNBpVNBpVMBhURUWFJGee6+mO2Ynnuqqq6rhj2rp1q6qqqpLhyGnneaZjduJ5Hs9lmqZp9SAws6ampuQfwvb2dsfcsTAVwzAUDAaTj7u6uhx1zNFoVKFQSFu3blUkElEgEDiu8q7TzvlMx+zEc24Yhk499dRJ13iM/6fXSec6lWN26rkef0yJO8uOrTDulPMszXzMTjzPCYQnAACANHDZDgAAIA2EJwAAgDQQngAAANJAeAIAAEgD4QkAACANhCcAAIA0EJ4AAADSQHgCAABIA+EJAAAgDYQnAACANBCeAAAA0kB4AgAASMP/BwXhhDs6iLGYAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "plt.plot(data_training[:-2, 30] - 2*data_training[1:-1, 30] + data_training[2:, 30])\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "id": "937b2c9d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(41, 81) (41, 81)\n", + "0.02205541645507997\n" + ] + } + ], + "source": [ + "print(data_test.shape, pred_u_v_4_sh.shape)\n", + "print(np.mean(np.abs((data_test - pred_u_v_4_sh))))" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "id": "b9485f0b", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAowAAAIACAYAAAAIQT11AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAA9hAAAPYQGoP6dpAABNbElEQVR4nO3dTXMb2Z7n938CJCGpJCqLNW5Xd1/ddkGecdizMqXeObwpaO8Fu/QG3OS+FkBwVVUrBvEOyOvtLFTCwnvh7rxTCeGVwxEeocJX0T23e1xUivUg8QGZXugCI4jnd4iTzCSJxPcTgajSSZzMBPIg8/Dkwy/KsiwzAAAAQKhd9QoAAADgeqPDCAAAAC86jAAAAPCiwwgAAAAvOowAAADwosMIAAAALzqMAAAA8KLDCAAAAC86jAAAAPCiwwgAAACvpZA3J0li33//vT19+tSePXs2U51ut2txHE/qt9vt4JUEAADA1Zm5wzgYDOyHH36wJEns4OBgpjrdbtfMzDY3N83MrN/v29bWlu3t7eVYVQAAAFyFKMuyLKRCr9eznZ0de/Hixbnv/fTTT+3HH3+cjDCamUVRZIGLBAAAwBUq7RrG4XBoSZJMdRbH+v1+WYsFAABAwYKuYQwxHA6d5XEcW5IkzmlHR0d2dHQ0+XeapnZwcGCfffaZRVFUxmoCAICCZVlmP//8s/3N3/yN1WqXf3/tu3fv7Pj4uLT5r6ys2I0bN0qb/3VUWodRWVtbk9dA7uzs2HfffXfJawQAAMrw6tUr+93vfnepy3z37p3dvLNmdvq2tGV8/vnn9uOPPy5Up/HSO4y+G2a2t7ft66+/nvz7zZs39vvf/97+n//4H+3OnTtT7/3nn0+c83j+T4dy/v/3v/zsLK/X3KOX//3nd5zl/6Mo/9vb+uusH/4nd/lvibM8PfzJPaM0dRcf/SqXnf78xlmenRw5y5XaJ3ed5fX438g6UfxfO8tHtz9zlp/ecpe/fnvqLj8ayWX/p5/dn++fD985y/8pUeXunc6f37jfb2b286F72e9+dZf/9ou7PZ+8c5cf//qLXPbJW/dv4PStu87oxP05Rsei/MS9TmZmWebeHlkaVh7V6kHlZmZR5J621HDv0GtLy87y+o1P3PNZuSmXvXxz1Vm+8slt9/tvuJd945a7/ObtFbns23cazvK/+dS9vn8bu8vvrbnL//qOPiD+9R33en3acO8L4xtiG7197Syv/+a5wfIX97TRwb8ElWdH7nYeiXZjZla7697nqX1h7aa7TWU33MeSbEkve3TrU2f5v566t8X/+Wf37/7Fq8RZ/uat/n3/u8/d7fl//m/WzpT9+svP9ujBvz9z7L4Mx8fHZqdvbel/+Mqs7v5NXcjoxP78f31vx8fHdBiL0Gw2neVJkshpjUbDGo2zO787d+7Y6ur0DvnQ3I365m19Q03jZ/c01WG8ddvd0G/fcR8cVu94OoyZ+0dbr7s7QmkqOiPiAJsu6c+dpu5h+ew47DR/7RP3AaV+270zNDOL7rh3MCPx3Z5+4v5uT5fd39PJO91hvJW5v8ObqXsH0jhxb7+VY/dBblmUm5ktHbvnVT8V5SfubTRK3e28dqI/d+3UPa/o1P0dRuImtCgV5Zmn3Yj2aVFYea4Oo6qz5O5QRaLDWFsWHUxRbmZWE53J2sotZ3m9ITqronzphvszmJkt3XRPW7nlXnZD/I5v3na//5Pb+nOrzuqdG+52vqo6jOL3Xa/7Tim6//gaHbk/x+id+3Nkokl5O4yfiO0q9oWyw3jTvX/0dhjFPvKt6DDe+sX9e23ccn/nK5HuMN74JOyYaGZXejlZtHzDorr+YyuvzLMfqrLSLixoNpsWx7HzWsZWq1XWYgEAAFCw4A6jOqU8HA4nz10c297enrojutfrTZ7JCAAAUJaoVi/ttYhm7jCOO4R7e3s2GAys0+lYr9ebTO/3+2ceyN1uty1JEuv1etbr9ez58+c8tBsAAJSODmOxZr6GsdlsWrvdltF+m5ubztHDD9+/sbGRYxUBAABwlS79LmkAAICyRVFJo4HpYo4wXv7TNAEAADBXGGEEAACVE9VrFtVLGA3MFnOsbTE/NQAAAGbGCCMAAKicWkl3NC/qg7vnosMYZdmZNIp3I3dE3psj/ZT6347dqRKfiditW8vuRrFSdz+5PhrpVIIodT9V3zL351CJGdmp+/Nlnrg2mb6hykN/DAUGy6cidUR8S4VSiT+q3CcSX0mkllF3V1Dv951mqS2523Nt2V2uY/vCv/VUzWtUUDSg73MHpsOo7yn0/b71Ct3e9SXRDjxpGaHtM097DpWJ33GhCjpoqzZovghMEZspy8VvT0W9+qhjyUrdnbojj2Oirb0Vx0kzs4Nf3Me4n4/OrtOvnthWzKe56DACAACEKO2ZiYwwAgAAVAMdxmJx0wsAAAC8GGEEAACVE9VqFhV4jf1EGfOcA3QYAQAAStLtdi2OYzMzS5JERiyH1ul0Onb//n0zM1tbWys9fpkOIwAAqJzrcA1jt9s1M7PNzU0zM+v3+7a1tWV7e3u56yRJYl9++aX98Y9/tDiObTAY2IMHD0p/OsFijqsCAACUbGdnZ9LxMzNrtVq2v79/oTqdTsceP348GYFcX1+3Z8+eFbviDnQYAQBA5by/hrFewmu2rtNwOLQkSSYduw/1+/3cdfb3921jY8OGw+GkrNVqzfalXAAdRgAAgECHh4dTr6Ojo6npw+HQWS+OY0uSxDntvDrj6YPBwJIksWazaVtbW7IDWiSuYQQAAJUTRSVdwxi9n+e9e/emir/55hv79ttvz62+t
rZmBwcHQYsc1xl3GOM4tvX1dTMz293dtS+++MJev34dNM9Q89FhzNIzEXquKCIzsze/6TinYxEnqCKSwqMBPfF8KgJQvl1FA7qjmVQk1fs6Ik5QxLXJ4DAVoeWjIs2u8LEEtYKi0XwRa2oZMuovMELOF1OnptVFuY4GDN/ekZpXzV2uogRDY/5802pLy+5yEdemvqdc33lo5KMsl4suNNLyusk8H7z0T+dp/8HRgCs3nOXyM/iOFyoacNk9t9BowJEnrjARx9efHTGAv3oiBi9Nve6NE80rS9/P89WrV7a6ujopbzTc8YwfC+0suuo8fPhw8v/j0cd+v1/qqen56DACAABcI6urq1Mdxo81m01n+fhUcp46anocx/J0dlG4hhEAAFROOTe8zH6au9lsyo6cGgk8r8640/jx9CRJpkYdy0CHEQAAoATb29tTN6T0er2pR+YMh8PJcxdnrbO7u2tPnjyZmt5qtSbXNJaFU9IAAKByynpwd8g82+22dbtd6/V6Zmb2/PnzqYd29/t929vbm0pyOa/OxsaGHRwcTDqaP/3006U8h5EOIwAAQEk+7Ax+HN+3ubk5NXo4S51xvctGhxEAAFROrVaXT1y42IxLmOcc4BpGAAAAeDHCCAAAKmccDVjGfBfRYn5qAAAAzIwRRgAAUDnX4S7pKqHDCAAAKocOY7HmtsPoyq4082dJj9LMWa6zpN3lKkvaTt0Zn2ams0FFucx/PhGfT2RMm5nORBWZoe5vSfNmdaprPXzhuFckNJPXl9Ubmg1dXwrMmF7SP91UZCTXTkW5yn8WWeM+Wer+fOPs1Y+p7Gm1Q/ZdO6SzpMOyoWW5+F7N9PZQ209ljddE9npd7KPMimu3dZX7XqA0E3uXHPuDoq4jy9R+0JOpXDt6665z5M6Sthtiv62+Dx+xXsviuHSn4f5d3Fxxl6vjpJnZL0fuY9wvx2fLf3OUYb7NbYcRAABAYYSxWNdvmAcAAADXCiOMAACgcqKopBHGiBFGAAAA4AxGGAEAQOVE9br/pswLzHcRMcIIAAAAL0YYAQBA5RANWCw6jAAAoHJ4rE6xFrObDAAAgJkxwggAACqHEcZizW2H0RVFZGb2s4gu8rm57N74t1fcX8+SioQb5YhCkrF9olxEAKooQTNPnKBahro+4xJ+JDmCsoKpCLTQyDRvNKCoo6Li1PtVlOCpN5YwLCKvrqIBVfvwUHX0vJaD5u/bUcvPLSL96oHRgL5l11TkoyiviRi3mogAVO3D7Gqj/kqXJ0K0oP1XNtLRgKaiW4/d0YBq/xypyFgPVUcdl1ZEG1THPd9+7a047v5yfPb3/dZRhvk2tx1GAAAApVaLZG77xWZcgT/GcuAaRgAAAHgxwggAAConqkXyMqCLzncRMcIIAAAAL0YYAQBA5URR5L1h7CLzXUR0GAEAQOVEJd30knFKGgAAADiLEUYAAFA5UVTSTS8LekqaEUYAAAB4McIIAAAqh8fqFIsRRgAAAHjN7QijK7vSzJ9feXPFnZ3ZEPmtKyLvVZVHqc6SVvmfWSrKRf6zzCo9cWdMm5nMjFb5vlFgvq8vozVTWbCBGbFZjpDpWkHXmahsVdVuzMzqYpq6Y0++X5SrjGIzs5HIQg7NeVYZ0z5yGaNicmWjeniWdGhmtMqeVu8309tVbr/A9uG701NlCMuMabWMHJnphcmTGV0yX5Z0qrKh1f5ZlMvjgnfF3HVqI/c6qePVLZklrbfFb8fuZfx24siSPr36LOlaFBV2HPhQxjWMAAAAwFlzO8IIAACgcA1jsegwAgCAyqHDWCxOSQMAAMCLEUYAAFA5NaIBC8UIIwAAALwYYQQAAJUT1cp5YtM1fArUpVjQjw0AAIBZMcIIAAAqJ4oii0p4yHYZ85wHjDACAADAaz5GGB0XIriiiMzMjj1xRLcb7o+rIpJ0BKBYhi+/TkQAqtg+GecnIqlMlZsvEs69ToX+7XQNL/YIjUALLfdNq6k2FRgJpyLnzMzqI/e0LBWRgSK2T7Ubn1S1tVpB0YCeGMqamCYjAAPLVZzf+zphUX9yewe2D7Ni2+2149t/qLbgaSMh1P7RzGz0zh3FWlsJjG5VcbIi/u/9NHGcEb+9lbr7uKeiTX3t4/jUvV5vHcfjd+IYfZlqNX+sZl7Z9TusXYr56DACAAAE4MHdxVrQfjIAAABmxQgjAAConCgqaYSRm14AAACAsxhhBAAAlVOLInmT40VkjDACAAAAZzHCCAAAqqeku6SNu6QBAACAsxhhBAAAlcNzGItFhxEAAFROrRaVkvRSxjznAaekAQAA4DW3I4y/HLkzOI9E1qWZzsiUWdIqB3Ykcpt9+Z9KYGa0yiRNPVnShSkyu1VkxKaeOO6yqfaxJDN5PXnOgdnQ9brIIhbZwur9ZmapmJYtuX/uMmM6R5a0ylnPMy/n/H1Z0iIDWtWpLavMaPf35PvOi9p+qn2s+LLDA0c7ZMb0JQyayJ/3ZWTOF5QxbWaWjdz7+vTYnSVdE+Vq/59HJHKpl2vLznJ13Lspyn3eHjuypI9zHA8LFkVRKQ/Z5sHdAAAAgMPcjjACAAAoUa2cwevLGBC/jhb0YwMAAGBWjDACAIDK4S7pYjHCCAAAAK/gEcZut2txHJuZWZIk1m63z62zv79vSZJYHMf28uVL297enswDAACgaNflwd15+k0hdR49emTPnj0LWqc8gjqM3W7XzMw2NzfNzKzf79vW1pbt7e1562xubk598H/8x3+0p0+f5lxlAAAAv+vwWJ28/aZZ6/R6Pev3+zOvz0UEnZLe2dmZfAAzs1arZfv7+946z549mxpNjOPYkiQJWkkAAIB5k6ffNGudJEns4OCguJU9x8wdxuFwODmt/DFf7zaOY3v06NGkkzgcDq3ZbAavKAAAwKzGN72U8ZpFnn5TSJ3vv//evvrqq5nWpQgzn5IeDofO8vNGDP/whz/YgwcP7NNPP7V2u23379+XQ7FHR0d2dHQ0+ffh4eGsqwcAAHBpPu6jNBoNazQak3/n6TfNWqff71ur1Qpb4Qu68GN11tbWvEOicRxbp9OxZ8+eWbfbtVarZV999ZWz97yzs2PffffdmfIsiiz76JoBVxSRmdnIky3XEPFaDRHTtSKysqKRO54v8kUDZu71ykYiGkpFRqnIQBFV5RN5os7c73dHRkWeiLxMPOFUlRcpNAJNvj+w3ExHub0V33ldvF+VpyPdzkcjETsnfhsqUk/F+fni+dJT92/jKqMBVQSgfL+K7fPF8wVuP3XRvFq2r62piEoVaXkZCguF8+0nxDTVRtR+SpX79qmjY/d+uLYs4jfVfltFuuaJmRV11HHs9oqIxPW0c+XYEcl74onpvSxRVNJNL3/pj9y7d2+q/JtvvrFvv/323Prn9ZtmqZMkiTWbzUu9xO/CR+3zPnSn07Fms2lPnz61ly9f2sHBgT148MD53u3tbXvz5s3k9erVq4uuHgAAQOFevXo11WfZ3t6eqV6e6w4/rLO/v28bGxvB87iomUcY1XWH416uy/hc/HjYtNls2osXL+zBgwfW6/XO
fOCPh3MBAADyqNci7+h8Xtlf5rm6umqrq6vyfXn6TefVGQwG9vDhw8A1LkZQhzGOY+dNK+o8+nA4dJ563traCltLAACAOZKn33RenX6/b4PBYHIDzMuXL83s/aN4ms1mqSOPQaekt7e3p+7S6fV6U7d+D4fDyfODzN5/uMFgcOYc+4sXL65kOBUAACyG2l9GGIt+hUQDhvabzqvTarWs3W5PXuMBuHa7XXq/Kuiml3a7bd1u13q9npmZPX/+fOqO536/b3t7e1NPJH/69Knt7OzYZ599NrnLZ3d3t6DVBwAAOKusU9JpwDzz9JvOqzPW6/XsyZMnZvb+fpFHjx6Veud08F3SH36oj3uzm5ubUz1ns/d3SdNBBAAAiyi033RenQ/LL/Ns7YUfqwMAAHDdXIcRxiop/2F4AAAAmGuMMAIAgMphhLFYjDACAADAixFGAABQOUu1ciIyswUdapvbDqMru/I8N0V25q1ld7lsaCen7nJvlrSYlrrLZfaoyORN1TqZJ0NV5NaqzGjJk+/rzYK9IuoZWkVmSatpatlRJJYhttGortuaqpMtiTxzkXNudtNZqvKi8wjNq/a1TVVHZUaH5j+r79XMrCbyelUdVa7ahy/fV01T5aFtc0HPvnllYr+t9sPZ0Vt3+UlxvyV1jFHHMXXcu3NDdwtU2xk5cupHcr+CeTW3HUYAAACFaxiLRYcRAABUTq2kDuNoQTuM1+98IQAAAK4VRhgBAEDl1KOa1cU1/Bed7yJazE8NAACAmTHCCAAAKqesm17KmOc8YIQRAAAAXowwAgCAymGEsViMMAIAAMCLEUYAAFA5jDAWq3IjjFe6IX3RgGUvenR1y15UV9nWVIzbpSxbRO1VfdkqMvAylJGHOw+yBX18SXSFx5LbnmhALLa5aBlZ9v71IV+WtDqQh2ZJL4t8WDsKz4xWOwCZyyvKfZnRpXcai3yelTgQ6FzjcEUdY/NkRqsD/JLodJwuud9/eqKXLTOxxTIyR96rr1wZnaay46bynFVmdB6hyw7NhvZlRqt5yYxwuY3C2oeZblNFZqBfFdUxjDzTZE692k+pfHJR7qP2taPjE3eF08By08cM+WsV71eHMXXcM9OdxqAs6cD9ShnqUWT1qIQRxhLmOQ/mosMIAAAQoqxowKs8w3OVFnO8HwAAADNjhBEAAFQON70UixFGAAAAeDHCCAAAKmepFpXyhIERI4wAAADAWYwwAgCAyuEaxmIxwggAAAAvRhgBAEDlMMJYLDqMAACgcupRSR1Gkl7my6mIHfLdEaWiAW+IOC41rygV8Xy+/E81LRXxTyN3lJqKBvTFAkaeqDMnFZWlyj15r2Vnweb53dZEJbUTUDucFc/3uqLi6MS8aiqmTsxnNNLLronfRk3F2hUYx5il7s+XpuHxay6+hAUZwye+W/l9qHKVseapI6ME1fZWbc0TGRjabtX7Q38XZmY1C/sBXkZanIz6Cy33UPvb1Nz751REAKbH75zldbH/z0Pvv9zltxu6W3BLHEOxGOa2wwgAAKAQDVgsbnoBAACAFyOMAACgcrjppViMMAIAAMCLEUYAAFA5jDAWixFGAAAAeDHCCAAAKqdeK2c0MPRJdVVBhxEAAFQOp6SLtaD9ZAAAAMyKEUYAAFA5jDAWixFGAAAAeFVuhNGXufrJsjsHU2Vq1kbu/E8T2btRjizp7PTY/X6ZMS0yTD3Zo7XQzSyyVWVGqy8vWk27hPB2mbErFi2zdFUmr+evzHrN/blV+zySGdNh+chmZtmSu31mot1maXF/N6aR+G3UwkKEoxztQ+U2h2ZM15fE+315ziqvWu1bAjOjVXvy1wnNjJaLKJ/cT1xhTr3vN6b2z6I8PXZnTJvImFbHi3OnOUSp+9igjnu3xHHSzGxlab6ypIkGLBYjjAAAAPCq3AgjAABAPYrkmaaLzncRMcIIAAAAL0YYAQBA5dSiSF6ze9H5LiI6jAAAoHLqVs7NXPN1609xOCUNAAAAL0YYAQBA5dRqUSmPwOGxOgAAAIADI4wAAKByeKxOsRhhBAAAgFflRhh90UUq8khFJFkq4pxUNJOIhXo/TUT3qYgpERmoogFVJJWvTiTixlS5eeLJ5kloNJqKXlvyXMei6jRE+VtRPhq5y9ORjtrLUhUBKMpDkyM9f12nMn4wLBpQUTF/Znq9VJ2lFRGppyIDPVFxKjZQlS8Ftg9f5KlqhzLSsuqDIzLatPz9WqqiW0/cx5LsxL2fj9SxJwcVWavaze0VfQy93XDvLFxtrYxIvlA8VqdY1egBAAAAoDSVG2EEAACoReWMqF+DwdMrQYcRAABUDo/VKRanpAEAAODFCCMAAKgcbnopFh1GAACAknS7XYvj2MzMkiSxdrt94TrdbtfMzF6+fGlmZnt7e8WtsECHEQAAVE69pJteQuY57thtbm6amVm/37etrS1vB++8Op1Ox3Z3dyfv39raskePHtmzZ8+CPkcormEEAAAowc7OzqTjZ2bWarVsf38/d50kSWwwGFiSJJPpW1tb1u/3bTgcFrvyH6HDCAAAKmd8DWMZr1kMh0NLkmRyavlD/X4/d50ffvhhqnPYbDbNzKY6kWXglDQAAECgw8PDqX83Gg1rNBqTf6sRvziOZefuvDpxHNvr16+npo07kuOOY1kYYQQAAJVTr0WlvczM7t27Z3fv3p28dnZ2ZlqvtbU1Ozg4CPosvjo7Ozu2t7fnHJUs0tyOMKoczJsiL9pM57Sqeak8T5XN6ZOdnrjLRZaoyoZOR+5MapUX7ROJzFWdxeouzyLP3x1q6F7VCYwcrll4trB8f2Amry/fV+b4Bs7r9FRkTNf19q6JedVEzrPaCahNNDrVGylSedVR+VnSalp9SZSLbGj5/Xm2d01cBa+WobZ3aLvJU0e1c/V7uZRHiMj9hGfZqoHKzGixX6vrY0YotR+W5cfv3DMauY8XZvr4I39h4v3LNfcv//aK7hbcueGe5mrPqSd7/bKU/VidV69e2erq6qT8w9FFn9DOoq9Op9Oxx48fT13zWJa57TACAABcldXV1akO48fUKeIkSeS0kDq9Xs/u379/KZ1FM05JAwCACho/VqeM1yyazabFcey8LrHVal2ozvi6xXFnMUkS7pIGAACYR9vb21N3RPd6vakRweFwOHnu4qx1BoOBDQYDW19ft+FwaMPh0Pb3921tba3ET0KHEQAAVFBU0iN1ooDrItvttiVJYr1ez3q9nj1//nzqod39fv/MQ7x9dZIksS+//NI6nY7dv39/8up0Otz0AgAAMK8+jPXb2NiYmra5uem8BlHVcT1W57LQYQQAAJXz4SNwip7vIuKUNAAAALwYYQQAAJVTM7MyBgMXdaSNDiMAAKicehRZvYQHd5cxz3mwqB1lAAAAzGhuRxjVRae3VnTM0y0RG7isus0qAlCWe+LPUhHppyIDVZSUiAz0iUIjmgIjA2VE13nTHESyXC7qY8soNfFXY564thUZCef+DleW3Nv1WETIjUb6e9XNtqgvV7fBLA37yzsV66Ti63xUUwuNAFTvV+VmZnUVOyr
KVTSgbB+eZet5ic8R2M595mqgJTgKtbjxlNGJ2M+r/b8ofz8x8BigIkHVMdQTr3tbRAO6pJ4ozctSdjTgorn6LQoAAIBrbW5HGAEAAJR6TZ9puuh8F9GCfmwAAADMihFGAABQObWonOsNF/S53YwwAgAAwC94hLHb7U4CrpMkmco79BkHZZuZra2tnclTBAAAKEqtpOcwLupd0kEdxm63a2Y2Ccru9/u2tbVle3t7sk6SJPbll1/aH//4R4vj2AaDgT148MAy3yNoAAAALoDH6hQr6JT0zs7OpLNoZtZqtWx/f99bp9Pp2OPHjyejkuvr6/bs2bPwNQUAAMCVmLnDOBwOLUmSScfvQ/1+X9bb39+3jY0NGw6Hk/e1Wq3wNQUAAJjR+LE6ZbwWUVCH0SWOY0uSxFtnMBhYkiTWbDZta2tLdjCPjo7s8PBw6gUAAICrdeHH6qytrdnBwYFz2rjDGMexra+vm5nZ7u6uffHFF/b69esz79/Z2bHvvvvuoqsEAAAWHNcwFuvCHUbVWfzQw4cPJ/8/HpHs9/tnTk1vb2/b119/Pfn34eGh3bt3zzlPmZPqeUCSyshUmZp2cuouF1meUSreb2apyBKVGdMqS1qU5xKaoarKL+HHoxaRZ9Hqx67K82RJq2mq3TZEucqSPj31ZEkvuW8oy7JizqNEns+t8qplZnQ9bAP6MqbVeqlymRmt9i1Lnu0ts8NFxnRg+8jT1lR5ePuXiw4WfLOjN6debO+62K+Jcjn7Aj+43G+fHrvff+Iufz9RzCuVIfLO4mXx8e409Pd09+ays/ztydnjWOr5vWA+zdxhbDabzvLxqeaQOnEcO09xNxoNazQas64SAACAUxSVM56xoAOMs1/D2Gw2ZUdP3cTSbDat2WyeqZMkydSoIwAAAK6voDH37e3tqRtWer3e1GN2hsPh5FmNY7u7u/bkyZOpOq1Wa3JNIwAAQNFqFpX2WkRB1zC2223rdrvW6/XMzOz58+dTD+3u9/u2t7c3lf6ysbFhBwcHk47kTz/9xHMYAQAA5kjwTS8fdwY/tLm5OTXi+GE5AADAZeEaxmJd+C5pAACA66YWvX+VMd9FtKDPKwcAAMCsGGEEAACVwynpYjHCCAAAAC9GGAEAQOWU9QgcHqszZ26uuOOLVLSWmY4GlLFbKmpJUZFNZpapCKhTERl4GWQEYDHRWmZmmYj2UuVFklFnYnPr8rAYNzMd9ff2JCzGTS1DRQaa6Ri+0B+7itRLT3U7T2vuZUdinUL5Ygll5J34rlTM4JLITKt5ouKWxDLU9guNAFTtyTuvwPavXEpubpH7A7H/Kuz95olPDZSpyFjfccFznAkh9zmedn57JWAvIo63mF9z22EEAACQSrqGcUEHGLmGEQAAAH6MMAIAgMrhOYzFosMIAAAqJ7Jyzh4vaH+RU9IAAADwY4QRAABUTi2KSrnL/1KeHHANMcIIAAAAL0YYAQBA5URWUjRg8bOcC4wwAgAAwIsRRgAAUDk1K2dUbFFH2hb1cwMAAGBGczHCGDnifVSW9I0lnV95Q2TEqmzVSGV2hpab6VzqdCRmVUxeqJnOPZXZ0MFZrPP1d4fMHBZPY10W5TKD3DwZwiKndUW025HIYPZlSas6itoJjE7D859lZvQlxMrWxXcSqUzlJdEOxDZS8zfT21tlQKvtrduHXrZqh6HtPDRj+jJ4M+dD9ztqPyj3j3r+vmku2ci9P89Oj53l6fE7Oa/66DRo2XI+on2seBrCLXEMPUnPlqeB31EZoiiSv/+LzncRzUWHEQAAIARJL8W6+j8BAAAAcK0xwggAACrHdTlbUfNdRIwwAgAAwIsRRgAAUDk8VqdYi/q5AQAAMCNGGAEAQOXwWJ1iMcIIAAAAL0YYAQBA5fAcxmLRYQQAAJW0oH27Usxth1FFAN5a1hlkwRFvIupPRgaKmD8zHQHlq1MUGWO1tOJ+//Ky+/0iMrDQ+K5Avr/0VPSV2t6qfagotTxxbSoq7njkLh+lIkJuKTw68m1wDdH+PdtUpmZm4TGDzmV7rh1SzVBtv9AIQBVHauaJglQRgIFRgnliKJdFxJuMDFS/F893rlbrSq91UtGmgeW+dl4UGQF7eqIriWNGaJSt2qy+aMDbK+4uw4krEtRzLMZ8mtsOIwAAgMIp6WJx0wsAAAC8GGEEAACVw2N1isUIIwAAALwYYQQAAJXDNYzFYoQRAAAAXowwAgCAyomsnOcwhs6z2+1aHMdmZpYkibXb7QvXyTPPi2KEEQAAVE4tikp7zarb7ZqZ2ebmpm1ubtr6+rptbW1dqE6eeRaBDiMAAEAJdnZ2bHNzc/LvVqtl+/v7F6qTZ55FoMMIAAAqJ4rKe81iOBxakiSTU8cf6vf7uerkmWdRuIYRAAAg0OHh4dS/G42GNRqNyb+Hw6GzXhzHliSJc9p5dfLMsyhz0WGMssyij7Job4j81NuevNclcS98FJrnLPI/M0/+Z3binibLR2FZwTIv2jNNZqWGZrHmIfOni8kcNvNl5oa9PzST2syXLSzKT0WW9JL7+7jlaefKyJX3amYi5VxmMI88bTMTyyjn0vOPlqCy4mX7D8sI92WHq+0h5yXWKc+yVTuU7VZsCl97LpvKo/etkapTE/u1KDhLWv/G5D7Vsx92Ufv57ET9Ks2i9FTMLPCYIY57y552cKfh/k5+Ozk7r1Hgd1EGV9+hqPmamd27d2+q/JtvvrFvv/323Ppra2t2cHAQtMxxHdfIYt55hpqLDiMAAMB18urVK1tdXZ38+8PRRZ88Hbvz6pTdWTSjwwgAAKooS4NHXmeer5mtrq5OdRg/1mw2neVJkshp59XJM8+iXP2YMQAAQMU0m02L49h53WGr1cpVJ888i0KHEQAAVE6UpaW9ZrW9vT1193Kv15t6JM5wOJw8V3HWOudNLwsdRgAAUD3jU9JlvGbUbrctSRLr9XrW6/Xs+fPntre3N5ne7/en/j1LnfOml4VrGAEAAEryYWzfxsbG1LRxWktInVmml4EOIwAAqJ4se/8qY74LiFPSAAAA8GKEEQAAVE/Jj9VZNIwwAgAAwIsRRgAAUDnvowGLHw0sI25wHsxHh9ExrHxz2Z1peUuUm+mMTNmgVKMQ5b78TzsV00JzrAWZC+2bFpihquajMl3/UklPC6DmIuJyzcwsNMpUZemqdrPs+c5DM4EbolzlP4+WdDtXdRT1uY9P3b8LlcFsprOk08B1UlS+tZknS7qgzGi1jd7XEVnSohGqeeXJklbtULZbsU6hGetm/t9fITy/saLmFbp/NDOrLy87y0d1935eZUzLLOnTE7lsG4lp6lgijm/quLfk+dzq+OoqzzzHYsyn+egwAgAAhOAaxkLRYQQAANVDh7FQ3PQCAAAAL0YYAQBA9TDCWChGGAEAAODFCCMAAKieLDVLGWEsCiOMAAAA8GKEEQAAVE6UpSU9uJsRRgAAAOAMRhgBAED1cJd0oea2wxgSUTS2pLKvssB4PtVYPBfXZmJaVlA0YK2uP7eK+pPRV6FRWQXF/12WusgzUxFoKo5uua5z0V
QcXWhM3amKBvRkmY7SciO5fNGDobGERSrqO9exffp7DY30U+Whn8FMt0PVblU7V7+L0IjNS6P2O4GRp8H7QdNRf76IVhd1XJBRsqZjA9VpUvmLFO+vR552Lj63MxrQ83u5NFmmI34vOt8FdF13BQAAALgm5naEEQAAQOKUdKEYYQQAAIAXI4wAAKByoiwr6bE6XMMIAAAAnMEIIwAAqB6uYSwUI4wAAADwYoQRAABUDyOMhWKEEQAAAF6MMAIAgOphhLFQdBgBAEDlRFla0mN16DBeW66Nfmt59kzLMVFF50KKRqEaS+rJ//Rlg4aQGaa+wFeZraryUAvMjC45Z7pmOmNXZkPLLF33fJZVvq+Yj5nOXFUZwiqDObTczGy0VMwzwlT2usq3Nsu3viF8mcpqmvocOmPa3f7VtjPT21vnUgeWe37fqh3qduueT+jvxcz/+3MJbga+/YeaFrrPUfvHuj6WqP1tbaWYQ6rKizYzy47euSekp6JCWOfG9xtbEY3nhqPdni6FtQ1cf3PRYQQAAAiSpu9fZcx3AXHTCwAAALwYYQQAANWTZfqSs4vOdwExwggAAAAvRhgBAED18FidQgV3GLvdrsVxbGZmSZJYu90Oqv/o0SN79uxZ6GIBAABwRYI6jN1u18zMNjc3zcys3+/b1taW7e3tzVS/1+tZv98PXEUAAIAwPIexWEEdxp2dHfvxxx8n/261Wvbo0aOZOoxJktjBwUH4GgIAAITilHShZr7pZTgcWpIkk9PRH5pl1PD777+3r776KmjlAAAAcPVmHmEcDofO8jiOLUkSb91+v2+tVuvcZRwdHdnR0dHk34eHh7OuHgAAwH+RZSWNMC7mY3UufJf02trauaeakySxZrN5bsdyZ2fHvvvuu5mWqyIAXRFFYzLyKLRBqfd7nv6eiWmqXEYAqjg/T3RYtLzsnrC04i4PjQYsOf7PzCwS8WRRpH+4KtFMfVXLKu7r1L2NVPSab9plRAMqqv2r8uPTkfv9OaIBL0Po5wuNAPTF8xUWASjK87S14GjMq3zI2iXsQ4IjANV+0Mxqy+5Dp9o/18QyspE6lrh/e2ZmmYiZralowJEoFxGivnag2toNR+7uqczixby68BY9r7O4v79vGxsbM81re3vb3rx5M3m9evXqoqsHAAAWUTZ63/ku+pXpDn2VzTzC2Gw2neXj0UOXwWBgDx8+nHllGo2GNRqNmd8PAACA8gV1GOM4tuFweKaDqK5PPDg4sMFgMLkp5uXLl2b2/vE8zWZz5pFHAACAEFmaysu+LjrfRRR0DeP29rb1+/3Jcxh7vd7k/83e3xjT6/UmD/NutVpTncnBYGD7+/vBD/sGAADA1Qm6hrHdbluSJNbr9azX69nz58+nnsHY7/flMxl7vZ7t7OyYmVmn0+EB3gAAoDxlXL84fi2g4LukPxwd/PiU8ubm5tSI44c2NjY4BQ0AAC5HWZ27Be0wct87AAAAvC78HEYAAIDrJhuNLBsVPxpYxjznASOMAAAA8GKEEQAAVE+aehPYLjTfBcQIIwAAALzmY4QxS8/kN6ss6ZW6zlxVkyKV56wyo0W5yvg0MzMxTWaJCiqTVGWY/qWSu47MUHXPK7uMvNcC1cy9wZdV3mvk3hYqi7jmy/cVgawrKhta5LoWmSUdSn1u37JD12skfnt1X3sWQrOkZca02HYqL9qsuMxotWzVnsx0O1Sfb1nsCFXGtMpk901T2e+FCs22D21TnizpSGyP+spy2DKE7ORETzt+555w6q6jjmOZKFftwMxsSbUpR7kv//zSpGlJd0kzwggAAACcMR8jjAAAAAGydGRZCSOMZcxzHtBhBAAA1ZOVdNOLulyt4jglDQAAAC9GGAEAQOVwSrpYdBgBAACuiW63a3Ecm5lZkiTWbrcvXKfb7ZqZ2cuXL83MbG9vL3i96DACAIDqSUclPVanvBHGccduc3PTzMz6/b5tbW15O3jn1el0Ora7uzt5/9bWlj169MiePXsWtG5cwwgAAHAN7OzsTDp+ZmatVsv29/dz10mSxAaDgSVJMpm+tbVl/X7fhsNh0LrRYQQAANUzjgYs41WC4XBoSZJMTi1/qN/v567zww8/THUOm82mmdlUJ3IWnJIGAAAIdHh4OPXvRqNhjUYj9/zUiF8cx7Jzd16dOI7t9evXU9PGHclxx3FWc9thVBGA3mhAFVWkLkeQEYDuCCZvnNMo7JoHFfWnIqlqK3pTRksirkouQ0Riicgtb2RgYJyg2kSh5f5liCg1Ua4irnzRVyeB8xqJqLirjAA8PnW3/1EtfJ305whrH/I37KsjtoWK51PxZ+r9ZmY3V0RUaWA0oGofvri20Papvg9VriI2L0WOOFK1PwqNQpX7TTOriWm15VP3OoWOSHmukVPHmSh1Lzv0ejtfqqNKqHQdd1UE5WXKRqPgY++s8zUzu3fv3lT5N998Y99++23hy1tbW7ODg4PC6uzs7Nje3p5zVNJnbjuMAAAAUslZ0q9evbLV1dVJ8ceji71ez548eXLu7La3t219fV1OD+0s+up0Oh17/Pjx1DWPs6LDCAAAEGh1dXWqw/ixjY0N29jYmHl+6hRxkiRyWkidXq9n9+/fz9VZNOOmFwAAUEXjx+qU8SpBs9m0OI6d1yW2Wq0L1RlftzjuLCZJwl3SAAAA82h7e3vqjuherzc1IjgcDifPXZy1zmAwsMFgYOvr6zYcDm04HNr+/r6tra0FrRsdRgAAUDlZmpb2Kku73bYkSazX61mv17Pnz59PPbS73++feYi3r06SJPbll19ap9Ox+/fvT16dToebXgAAAObVh7F+H18Dubm56bwGUdVxPVYnLzqMAACgeuYwGvA645Q0AAAAvBhhBAAA1ZOVNMKYLeYIIx1GAABQOWXdoFLmTS/XGaekAQAA4DW3I4wqM1rlwJrprFSVGS3L1RC3b+g78C8SmRktNpnKNn0/0Z2hGi2tBL1/3qjNrfJQVV6vKl9WMzKzZZGdnGYiM1pELY+WisuSDs2MVr+lU0++tcyMvoQmpT6f+hzq/aH5z3nqLIv8YtWmfLm8qk54e3aX+7KF82S5F0VmRqsKar8WWu6ZVltx75+zkfs3lp6o7GnPseT02F3n5MhZrjKmI3F8izL9+1bHUNdvbMnXcC5LydGAi4YRRgAAAHjN7QgjAACAxGN1CsUIIwAAALwYYQQAAJWTjUaWjYofDSxjnvOAEUYAAAB4McIIAACqJ03LuaN5Qe+SpsMIAACqh5teCsUpaQAAAHgxwggAAConS0f+h6BfYL6LiBFGAAAAeM3tCOOKivXyRGjJpKLLiAYMpKIBJU80YKSmiXgyGYmlvkAR0XUZfOFTKrYsNBptOXWXn4hyM7NlsfBUlI/EslWUYJFCIwPrnmhARUYGBlLrmqeOiu0Lfb9v2kpg1J9qN6rcLEd7FuU6SlAuWv7+9G9Pz6swan+kIlJlud7e0bJ7n1oTx4CsJspFZKBPdnriniAiA01EA8rjm4c6vLoiA2UU7yXK0tSyEm5QKWOe84ARRgAAAHjN7QgjAACAk
qVZrlHcWea7iBhhBAAAgBcjjAAAoHKyUVrOCGMJ85wHdBgBAEDlcNNLsTglDQAAAC9GGAEAQOVwSrpYjDACAADAixFGAABQOYwwFosRRgAAAHgxwggAAConG40sHRUX2fvhfBfRfHQYo9qZfNAlmcXqmY3KehaZmpEoz07cWZ4y49MsOGda5ZjK8rrIfzYLz1AV2dOZyla9hMxQtQiVf/u+jvtp/KqKyj5V5b5831RkCI9EQMCyCg4o8BcanBmtcq89KQdFZUbnodY3tDw0Y9pMZ0Y3xLyWxW9pSWZP6x2bzp8Wn0O0ZxkVf5WRwL6cejUtdIXVfk3sH83MbGnFXS46ElEq9rV1d86z77Et6jiTHb0Tyw7MkvZkTKv9rat5epos5tR8dBgBAAACZFlJz2H0dKqrjA4jAACoHG56KRaDxgAAAPBihBEAAFQOI4zFYoQRAAAAXowwAgCAysnSrJybXq7waRBXiRFGAAAAeDHCCAAAKicdpZaWcL1hGfOcB4wwAgAAwIsRRgAAUDncJV2syo0w+uK7ShcY/1coFVWF0iyLeMrLoCLnLsPNFU9kWoWXfWPp6pbti6EsW82uMhvw6kTL7FOBD83tCKNvB6o6jVEmOnTqLipZ7ukYimlZcJa0ODjlyDeVOz41L5HRmvlyXa8hdaBTn0LlpKqO4eg0k9PSTNQRbTOVnU/32p6kqew0qs9xItpzaGb0KM1kx+0ysqTVsovKkvb90ak6jaodqDxn/X69bJkZLealsnxV+/B1DOVvKTCXWskuI49ebAu1r81M7zujpWX3QtR+XpRHxzn2qWJe6bE7S7ousqdtJDKmPSKVJe0oVpnll4kRxmLNbYcRAABAIUu6WPM1ZAQAAIBLxwgjAACoHE5JF4sRRgAAAHgxwggAACqHEcZiMcIIAAAAL0YYAQBA5aRpamkJd0mXMc95wAgjAAAAvBhhBAAAlcM1jMWiwwgAACrnfYex+MheOozXWBbVzsTSLYmYLFVuZmYnIp5JPbU9MOYvzxPlI5nf5Y6ripbdkVQyqsrMTEViiXmpaEBZ7hEaJ6iip2rmjpzzpU+paapcx/yJeL5Ix+Ati+06ElXSLDRSz/e9ighAsS3U5xjVdDRgqKIiA/NkxcsIQNXWAiMizYqLAFTtRpV7lx24TqG/l/OmXRnxuWUUqizX33loNGCWupdRE/NJVZyfmWUnx+4Jok6UuiMA1XFPHg/NrCb2Ia4mdYXx5yjJXHQYAQAAQmRpSdGA3PQCAAAAnMUIIwAAqJwsLemmF0YYAQAAgLMYYQQAANVT0mN1jLukZ9Ptdi2OYzMzS5LE2u32THXMzF6+fGlmZnt7e6GLBQAAqLy8/axZ6zx69MiePXsWvF5BHcZxx29zc9PMzPr9vm1tbXk7gJ1Ox3Z3dyf/3trayr2yAAAAs0hHqaUljAaWMc+xPP2skDq9Xs/6/X6udQu6hnFnZ2eyQmZmrVbL9vf35fuTJLHBYGBJkkzKtra2rN/v23A4DF9bAACAGYwfq1PGqyyh/ayQOkmS2MHBQe51m7nDOBwOLUmSyZDnh3y91R9++GGqc9hsNs3MpjqRAAAAiyxPPyukzvfff29fffVV7vWb+ZS0GhGM41h2/uI4ttevX0+VjT/AuOP4oaOjIzs6Opr8+/DwcNbVAwAAmCg7S/rjPkqj0bBGo5F7vnn6WbPW6ff71mq1cq+bWQGP1VlbWwsa4tzZ2bG9vT1nb3hnZ8fu3r07ed27d++iqwcAAFC4e/fuTfVZdnZ2SllOaD/LVSdJEudAXYgLP1Yn5EN0Oh17/Pjx1Ln2D21vb9vXX389+ffh4eH7TmNUO5NjvCxCTCOR5WlmZiojMzBTM/PkfBZF5jwvrbjf78mSjkQdlaEamv+cJ2O6KL4lqyxTVa5ycfNkC49ENrTKEE5lVnCev47d80pFZnRNZEanIv955PnSZSa2iOstktpOajPVCsqYNiswM1q93xPMK+Po1T4y8HeRJxO4qD2Cb1+kcudVnUjNS2RGy/2mmWW1d+4Jav+sMqbFPjiqeY5j4vq57Oitu/zkyFmucq99VFNw5bXnyX0vWjbKLBsVk2P/8XzNzF69emWrq6uT8o9HF3u9nj158uTc+W1vb9v6+rqcnue6ww/r7O/vy35XiJk7jKpnOmuvtdfr2f37970rfdHhXAAAgMuwuro61WH82MbGhm1sbMw8vzz9rPPqDAYDe/jw4czr4BPUYYzj2IbD4ZkVPO+8+Pi6xXFncXynzkWHRwEAAFzStKTH6pR0l3SeftZ5dfr9vg0Gg0k/bPw87G63a81mM6hDG3TmYHt7e+qum16vNzViOBwOJ88DGhsMBjYYDGx9fd2Gw6ENh0Pb39+3tbW1kEUDAABUWp5+lq9Oq9Wydrs9eW1tbZmZWbvdDuosmgV2GNvttiVJYr1ez3q9nj1//nzqwZD9fn/q30mS2JdffmmdTsfu378/eXU6HedNLwAAAEXI0qy0V1lC+1mz1Bnr9XqTG3M6nU7wA7yjLFNXqV+9w8NDu3v3rv3Ln/985jqBVFx+W8v0hbzRqfvi39o79+N76r/8f87y03/5f53lo5/+LJed/vzaWa4eAFr/5I57RuKi6tqNW3LZtTufivLYXeGm+5qMbOWmKP9ELjtt3HbXWXbP60iM9B+duiccey5oPhY/alXnRL3/1F1+NNJt7Z1Y3xOx7CNx2uRElXt2WGoZ6oYUdYOOvOnFs8eQN71cgirc9NIQ5TeW9N/2atmNuvuGipWlsHVd8dzctSTq3BB11Lwa4vPVjn+Ty1b77ej4V/f7j9zl6S+Jszx7p5ed/upetrwhUtxgkv4sln16LJetbsapf/pfOcuXfvffupf96e/c5bf/jVz2SeS+is21Tzg8PLTf/83n9ubNG+91fmUY9x3+j/+lZbeXL3xv7xm/nJza//S/96/ks12lq7u9FQAAAHOh+K43AADAFctGqWW18h7cvWgYYQQAAIAXI4wAAKByslFmmQgmuOh8FxEjjAAAAPCaixHGLIos++gOxZq4GzNKT+V85DQVkaSiBNX780QtiVgqFdsnIwA9MVayjliGjPq7wghAFQPmuXHVInO3EXUXbCrmtSy+pjTT34eOz3O3qTRTH0TcPSpi/nzT1B3M8q5qMR8dQqnvuL4MarsqoXdVL8n4xgIjAMXCvXdoi32Impf6nlS571udqxEHFRm47N53Zsci/s9TR0rVst2/Jt9d0uo4o+7QVpGB8niojntmsjG4mvM1SAa0dJRZWsIIY8oIIwAAAHDWXIwwAgAAhOAu6WLRYQQAAJWTZpkMILjofBcRp6QBAADgxQgjAAConlFmmecGwYvMdxExwggAAAAvRhgBAEDlpKPU0qj4G1TSBb3phRFGAAAAeDHCCAAAKicr6RpGogEBAAAAB0YYAQBA5TDCWKy56DBm2fvXh2qhudBmZqKOzNQcubM5LQ3MmPaIAjOjZbnIJDUznUstMmg/zu3+oIJ4f3ED1UXmj6r83UjsQNSy1dehsnrNzEaB2dDql1gTO6YT705Q5VWL
ZahsbfFwWt+DcJe9ycNnqf2u56sNVhMbNjRL2pvnXFBmdGgutG+aWt3Qdu773DrjPaw8l8DMe7mfkvtHESJvZpnaD6v3j8SxIc+yA7OkTZXLLGlfTv3s+4prECXNTS8F45Q0AAAAvOZihBEAACBElmWWlRANmBENCAAAAJzFCCMAAKicdJRZasWPBqYLetMLI4wAAADwYoQRAABUTjbKLBNPjbjofBcRI4wAAADwYoQRAABUzvsRRh7cXRQ6jAAAoHK46aVYnJIGAACA11yMMKZZdiZ6qC7ikaLRsZxPNFJRSO6LYlXUkox58pBRT6p8aSWoPFLvN09sYF2Uy8jAq/v7QsWZ+aIEi4pGq4sJMv3PdMSbiu3Tf7uFX7BdF9tJ/VGsIgBH4oG3aS38r2u17NAdUJ7IQF+0nXMZKs7P09jUMoqKANTtSa+vardF/S58dUJFRT4IOTAyUEakivg/M7NIxcCq/fmp+7ikol5zfRtiGeo4puJ1ZVSuedqOIwgw9HdXhixNddTtBee7iBhhBAAAgNdcjDACAACE4BrGYjHCCAAAAC9GGAEAQOVkaUmP1RHXd1cdI4wAAADwYoQRAABUzyi1zPc4i7wW9C5pOowAAKBy0tHZR/IVMl9OSQMAAABnMcIIAAAqJxtllpUwwshNLwAAAIADI4wAAKByXLHCRc13Ec1FhzEzR7amyH82lRdtZqbyp1V2psoLVeU5qJxnlTEa+n4z0/mmIlv1KjOjQ3lzblUdUa4zo907B/X+9xND1yosY7ou1snM7EScLqmpbGgxqzxZsGpH6mmdhQnOjJaZyiKb2ROcrHKmy86YNnPn+JqZ1UVTkxnTcv5aaC51kdR+KnjRYv8Y1eWP2GxpxV0eemxQ81H7bNPZ0NnIvezs+J17RuL98thqOkM7cmyLWgnPP8TVmosOIwAAQIhRltmohNHAMuY5D+ZnKAkAAABXghFGAABQOaPs/auM+S4iRhgBAADgxQgjAACoHK5hLBYdRgAAUDmcki4Wp6QBAADgxQgjAAConLSkU9KL+uBuRhgBAADgxQgjAAConJGVdA1j8bOcC3PRYUyzs/FlKs5Pxvz5pqmopRN3ea5oQBX1FFgeqSgpTzSgig2UEYCh5SLOzEvET0Vi0DtfbFlYZFomMvJkBGDNsydKRZ3AyEAVAXiS6vguFUeXivVVp2xUZKDPKE+lkqlIPxVfp7a3L3pQtSkV9RcaGeiLoVTLVpGB8nOLCep3lMdlRAbq/ZdYuHq/2tea3h9lo7D9V6T2nXn2qWKfoCIDdSSu5xgq9tvOX70nYhDzaS46jAAAACFGWWajEjKtF/WxOlzDCAAAAC9GGAEAQOWMsnKuN1zU5zDSYQQAAJVDh7FYnJIGAACAFyOMAACgcrjppVh0GAEAAK6JbrdrcRybmVmSJNZutwup0+l07P79+2Zmtra2ZhsbG0HrRYcRAABUTlrSNYxlPm622+2amdnm5qaZmfX7fdva2rK9vb3cdZIksS+//NL++Mc/WhzHNhgM7MGDB5YFjpRyDSMAAMA1sLOzM+n4mZm1Wi3b39+/UJ1Op2OPHz+ejECur6/bs2fPgteNDiMAAKicUZaV9irDcDi0JEkmHbsP9fv93HX29/dtY2PDhsPhpKzVagWvHx1GAACAQIeHh1Ovo6OjC81vOBw6y+M4tiRJctUZTx8MBpYkiTWbTdva2pIdUJ/5vYZR5mB6rlgYHTuLVaZmliczWhHZoCrnOVoW5fL9OvdU5lUHZ0lfRhCsm8qg9eXc1kQOs/rjMDgzWuVF56kjNpG+Vkb/rac+t5pXzfc5nPPRf10vX0pYcBhfBrSLzGb25TmHZkOrfGuROqzWybdsVUdmZYv5+zapqnOVrSAT+y+Z26zeL/a1ZiI72cyimvuYIX8xIq86qsvQeU0dr07dxz07dXduIvV+M8uWRS51dDY3WmZVX6Kyn8N47969qfJvvvnGvv3228KXt7a2ZgcHB7nqjDuMcRzb+vq6mZnt7u7aF198Ya9fvw6a5/x2GAEAAIT3HcYyHqvz/r+vXr2y1dXVSXmj0Zh6X6/XsydPnpw7v+3t7UlnziW0s+iq8/Dhw8n/j0cf+/1+0KlpOowAAACBVldXpzqMH9vY2Ah6dE2z2XSWj08l56mjpsdxLE9nK1zDCAAAKmeUlfcqQ7PZlB05NRJ4Xp1xp/Hj6UmSTI06zoIOIwAAwDWwvb09dUNKr9ebemTOcDicPHdx1jq7u7tTp8Z7vZ61Wi3vaXAXTkkDAIDKmcdowHa7bd1u13q9npmZPX/+fOqh3f1+3/b29qaSXM6rs7GxYQcHB5OO5k8//ZTrOYxRFvqo70t0eHhod+/etT/985/PXCdw88h9d090/FbOLzp95yyvHf3qLE+T/+wsH73+V/f7f0nksrOR+16t2s1P3OV3P3OX33C/PxLzMTOLVm6412lJlK/cFO9viHJ9h7ZchpjXqRj0PhW3+J6cvTnv3DojVS5+CWrnMPIsO7SOer+6I/nEc04kdF6+zxEyn+uKu6RnfL+6gd9zm7SqsyTqqO9jWT0F4cSzPz/+zV0u7v6tnYj3n4hHoXju8s1OT0QdcZf0ifvO49Fr9zEmTdzHGDOz0a8/O8tr4q7u+mefO8uXf//v3POP/1YuO70Zuyc42tTh4aH91d/+3t68eeO9zq8M477DduMLu6Ge+nEB77LUdo5+vJLPdpUYYQQAAJWTmVng38Mzz3cRcQ0jAAAAvBhhBAAAlTOP1zBeZ3QYAQBA5ZSd9LJo5rbDGI3cFyRHIv7PV8dG7guY1YXNWRp+VYSMehKxfZG6kURFA3pirKwmNrNap9CLhAu8qFjfWOD+hfpiy9SsVJxgJJYhkva8NyLICEARGRhl7verHZPvBozQm1tSsU76r+jw4LeibpQJvYHFR0bkBd5E4qujVlcvO+z9vvVSNYJ/F97fWNjnCN56vn2LivoTVGSg2j9Gmd7Py/1tGrZOKgJWxrmajjhUUbbqeBV6446ZPr5mru+wyGhdXAtz22EEAABQOCVdLG56AQAAgBcjjAAAoHK4hrFYjDACAADAixFGAABQOVzDWCxGGAEAAOAVPMLY7XYtjmMzM0uSZCoAu8g6AAAAeaUlXcOYLuYAY9gIY7fbNTOzzc1N29zctPX1ddva2iq8DgAAwEWMsqy01yKKsmz2T/7pp5/ajz/+OBktNHv/4FbfLPLUGTs8PLS7d+/an/75z7a6ujo17dZv/9lZJzr+Vc4vOnU/dDQ6+sVZPnrtXsbozU/O8uy3Q7lsuU43PnGW1z/9K/H+W87y2k33fMzMbKnhLM6WRfnSDXd53f2Q2WzZ/f73ddwPIM/EOqWR+4G1x+Kp07671U7En4HqAdZqJzAS8/G14NBlqL9Y1ft9P5/gB3er9xe4U+TB3bMuO+z9vvVSNepiIWoZvs+t6iyLOkti2UvqQd+nR3LZ0clv7vLjt2Je70S5+wHWvhAIUw/1Vg/PFssYvf5Xd/lPf5aLTn8NO87UP/trZ/ny7+675792T84ru3HHXe54cPfh4aH
91b0v7M2bN2eO32Ub9x3+V7tnKwUGS4wdZ6n9b/bqSj7bVZr5mxwOh5YkyVTHb6zf7xdWBwAA4KJG9pdH6xT9uuoPdkVmvoZxOBw6y+M4tiRJCqlzdHRkR0f/5S/KN2/emJnZzz//fOa9p2/PlpmZRcfuvzrNPCOMYlRy9Kt7XqNf3X/BZm/df8H6RCN3n72+Iv56PhUReb4WXBcRh8uifMkdoZjV3c0lWxIRU+YZlSxohNF3LUlhI4xqlE8v2lR65DyNMBY1Kvh+XsXMxxcFGT6vaowwqoQ8OcKoPl+BI4xLlzLCKEYSVbmYV74RRtGgs8ARRnWM+U0fS1LPNJf6Dff3sfyL+7iXLruPrWZmmfhKMkeU4fi4HXASs3DHFh7je5Xzve4u/FidtbU1Ozg4KKTOzs6Offfdd2fK//1/929zrx8AALgaP/30k929e/dSl7mysmKff/65/Yc//1Npy/j8889tZcV9yVVVXbjDGNpZ9NXZ3t62r7/+evLvJEns7/7u7+xPf/rTpTc4nHV4eGj37t2zV69eLdR1G9cR2+L6YFtcH2yL6+PNmzf2+9//3tbW1i592Tdu3LAff/zRjo89o8QXtLKyYjdu6Ov3q2jmDmOz2XSWJ0kip4XWaTQa1micPVV59+5dfvzXyOrqKtvjmmBbXB9si+uDbXF91NQ1EyW7cePGwnXoyjbzlmw2mxbHsfO6xFarVVgdAAAAXC9BXf/t7e2pu5t7vZ5tbm5O/j0cDifPXZy1DgAAAK63oA5ju922JEms1+tZr9ez58+f297e3mR6v9+f+vcsdXwajYZ98803ztPUuHxsj+uDbXF9sC2uD7bF9cG2qJ6gB3cDAABg8VzN1agAAACYG3QYAQAA4EWHEQAAAF4XfnD3RXW73UnWdJIk1m63S6mD8+XdFmZmL1++NDOb+YYmnO+i7fzRo0f27NmzEtZs8eTdFp1Ox+7fv29m7xOuNjY2ylrFhZFnW+zv71uSJBbHsb18+dK2t7cn80A+SZLY999/b0+fPp15P8Oxe85lV2h3dzfb3d2d/PvZs2fZ5uZm4XVwvjzfa7vdnvr35uZm1mq1Slm/RXPRdv706dPsin/elZFnW7x+/TpbX1/PXr9+nWVZlr148YLtUYC8x4zxdsiy99tmY2OjrFVcCC9evMj29vay3d3dbH19faY6HLvn35XuweI4nvohZ1l27k41Tx2cL/R7ff36ddZqtabqjA+KL1++LGktF8dF2vnr16+zvb09fhcFybMtNjc3pw6OWfb+AImLybMtXH/E8odtMZ4+fTpzh5Fj9/y7smsYh8Ph5BTBxz580PdF6+B8eb/XH374YSrFZxz3mCRJ0au4UC7azr///nv76quvSlizxZN3W+zv79vGxoYNh8PJ+0i3upi82yKOY3v06NFkvzQcDmVsLcrBsbsarrTD6BLHsexw5KmD8+X5XuM4ttevX9v6+vqkbPzDZ2d8MRdp5/1+n45JgS6ynxoMBpYkiTWbTdva2uLAeEF5fxd/+MMfbDgc2qeffmqdTscZMIFyceyuhmt3l/Ta2podHByUXgfnC/1ed3Z2bG9vj4vJSzLL9hh3UFAu37YYHxzjOLb19XVrNpu2u7tr//AP/3CZq7gwzvtdxHFsnU7HNjY2rNvt2tOnT+mkXBMcu+fLtesw5mk8NLhyhHyvnU7HHj9+TE54ic7bHuPToCjfLL+Nhw8fTv5/PJLCKGPxztsWnU7Hms2mPX361F6+fGkHBwf24MGDS1o7+HDsni9X1mFUoyC+EZI8dXC+i36vvV7P7t+/zyMSCpJnewwGg6kOCopR5H4qjmN5ag7ny7MtxtfOjS/TaDab9uLFC4vj2Hq9Xmnrimkcu6vhSjuMageqrsHKUwfnu8j3Oh4xGY8sJknCQfGC8myPg4MD6/f71u12rdvtWqfTMbP3zz3jwJhf3v1Us9k8UydJEjr1F5BnWwyHQ+clMltbW0WvHjw4dlfDlZ6S3t7enjpF0+v1pk5pDofDyYOhZ62DfPJsi8FgYIPBwNbX1204HNpwOLT9/X1bW1u7tPWuqtDt0Wq1rN1uT17jA2K73eY09QXl+W3s7u7akydPpuq0Wq2pm8QQLs/vYnzz0YdevHjB76IAvut4OXZXT5RlWXaVK9DtdidD0s+fP7fd3d3JtP39fdvd3Z2kiMxSB/mFbIskSeyLL75wXjx+xU2qMvL8Nsze74ifPHlivV7P2u22PXr0iL/iLyjPthini5iZ/fTTT+ynChK6LZIksZ2dHfvss88m15Jubm5yc94FDIfDyX5mMBhYu922v//7v590wjl2V9OVdxgBAABwvV27u6QBAABwvdBhBAAAgBcdRgAAAHjRYQQAAIAXHUYAAAB40WEEAACAFx1GAAAAeNFhBAAAgBcdRgAAAHjRYQQAAIAXHUYAAAB40WEEAACA1/8PcP0+JRqbDMEAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "Heatmap(pred_u_v_4_sh - data_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "0636b07e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", + " [0.0125, 0.0125, 0.0125, ..., 0.0125, 0.0125, 0.0125],\n", + " [0.025 , 0.025 , 0.025 , ..., 0.025 , 0.025 , 0.025 ],\n", + " ...,\n", + " [0.975 , 0.975 , 0.975 , ..., 0.975 , 0.975 , 0.975 ],\n", + " [0.9875, 0.9875, 0.9875, ..., 0.9875, 0.9875, 0.9875],\n", + " [1. , 1. , 1. , ..., 1. , 1. , 1. ]])" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "grids[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "73bb035e", + "metadata": {}, + "outputs": [ + { + "ename": "ValueError", + "evalue": "cannot reshape array of size 3240 into shape (81,81)", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m~/epde/EPDE_main/examples\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mpred_u_v_4\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgrids\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mValueError\u001b[0m: cannot reshape array of size 3240 into shape (81,81)" + ] + } + ], + "source": [ + "pred_u_v_4.reshape(grids[0].shape)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a4fa73b2", + "metadata": {}, + "outputs": [], + "source": [ + "from epde.interface.solver_integration import BoundaryConditions, SolverAdapter, SystemSolverInterface" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "32be88d3", + "metadata": {}, + "outputs": [], + "source": [ + "adapter = SolverAdapter(var_number = len(systems['4'].vars_to_describe))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b11adb26", + "metadata": {}, + "outputs": [], + "source": [ + "ssi = SystemSolverInterface(systems['4'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6b02429f", + "metadata": {}, + "outputs": [], + "source": [ + "ssi.form()" + ] + }, + { + "cell_type": "code", + "execution_count": 82, + "id": "f9dc8bcc", + "metadata": {}, + "outputs": [], + "source": [ + "import epde.globals as global_var" + ] + }, + { + "cell_type": "code", + "execution_count": 84, + "id": "972b5708", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[array([[0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", + " [0.0125, 0.0125, 0.0125, ..., 0.0125, 0.0125, 0.0125],\n", + " [0.025 , 0.025 , 0.025 , ..., 0.025 , 0.025 , 0.025 ],\n", + " ...,\n", + " [0.4625, 0.4625, 0.4625, ..., 0.4625, 0.4625, 0.4625],\n", + " [0.475 , 0.475 , 0.475 , ..., 0.475 , 0.475 , 0.475 ],\n", + " [0.4875, 0.4875, 0.4875, ..., 0.4875, 0.4875, 0.4875]]),\n", + " array([[0. , 0.0125, 0.025 , ..., 0.975 , 0.9875, 1. ],\n", + " [0. , 0.0125, 0.025 , ..., 0.975 , 0.9875, 1. ],\n", + " [0. , 0.0125, 0.025 , ..., 0.975 , 0.9875, 1. ],\n", + " ...,\n", + " [0. , 0.0125, 0.025 , ..., 0.975 , 0.9875, 1. ],\n", + " [0. , 0.0125, 0.025 , ..., 0.975 , 0.9875, 1. 
],\n", + " [0. , 0.0125, 0.025 , ..., 0.975 , 0.9875, 1. ]])]" + ] + }, + "execution_count": 84, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "global_var.grid_cache.get_all()[1]" + ] + }, + { + "cell_type": "markdown", + "id": "172f8f0b", + "metadata": {}, + "source": [ + "Solver testing: running the built-in solver on the Burgers' equation benchmark" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "97e46647", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import scipy\n", + "import time\n", + "\n", + "import pandas as pd\n", + "from scipy.integrate import quad\n", + "import sys\n", + "import os\n", + "\n", + "os.environ['KMP_DUPLICATE_LIB_OK'] = 'TRUE'\n", + "# __file__ is undefined inside a notebook, so resolve the package root from the working directory\n", + "sys.path.append(os.path.abspath(os.path.join(os.getcwd(), '..')))\n", + "\n", + "from epde.solver.input_preprocessing import Equation\n", + "from epde.solver.solver import Solver\n", + "from epde.solver.solution import Solution" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "d4d21fe4", + "metadata": {}, + "outputs": [], + "source": [ + "def solver_burgers(grid_res, cache, optimizer, iterations):\n", + " exp_dict_list = []\n", + " start = time.time()\n", + " mu = 0.01 / np.pi\n", + " x = torch.from_numpy(np.linspace(-1, 1, grid_res + 1))\n", + " t = torch.from_numpy(np.linspace(0, 1, grid_res + 1))\n", + " h = (x[1] - x[0]).item()\n", + " grid = torch.cartesian_prod(x, t).float()\n", + "\n", + " ## initial condition\n", + " bnd1 = torch.cartesian_prod(x, torch.from_numpy(np.array([0], dtype=np.float64))).float()\n", + " bndval1 = -torch.sin(np.pi * bnd1[:, 0])\n", + "\n", + " ## boundary condition at x = -1\n", + " bnd2 = torch.cartesian_prod(torch.from_numpy(np.array([-1.], dtype=np.float64)), t).float()\n", + " bndval2 = torch.zeros_like(bnd2[:, 0])\n", + "\n", + " ## boundary condition at x = 1\n", + " bnd3 = torch.cartesian_prod(torch.from_numpy(np.array([1.], dtype=np.float64)), t).float()\n", + " bndval3 = torch.zeros_like(bnd3[:, 0])\n", + "\n", + " bconds = [[bnd1, bndval1, 'dirichlet'],\n", + " [bnd2, bndval2, 'dirichlet'],\n", + " [bnd3, bndval3, 'dirichlet']]\n", + "\n", + " burgers_eq = {\n", + " 'du/dt**1':\n", + " {\n", + " 'coeff': 1.,\n", + " 'du/dt': [1],\n", + " 'pow': 1,\n", + " 'var': 0\n", + " },\n", + " '+u*du/dx':\n", + " {\n", + " 'coeff': 1,\n", + " 'u*du/dx': [[None], [0]],\n", + " 'pow': [1, 1],\n", + " 'var': [0, 0]\n", + " },\n", + " '-mu*d2u/dx2':\n", + " {\n", + " 'coeff': -mu,\n", + " 'd2u/dx2': [0, 0],\n", + " 'pow': 1,\n", + " 'var': 0\n", + " }\n", + " }\n", + "\n", + " model = torch.nn.Sequential(\n", + " torch.nn.Linear(2, 20),\n", + " torch.nn.Tanh(),\n", + " torch.nn.Linear(20, 20),\n", + " torch.nn.Tanh(),\n", + " torch.nn.Linear(20, 20),\n", + " torch.nn.Tanh(),\n", + " torch.nn.Linear(20, 20),\n", + " torch.nn.Tanh(),\n", + " torch.nn.Linear(20, 1)\n", + " )\n", + "\n", + " equation = Equation(grid, burgers_eq, bconds).set_mode('autograd')\n", + " if isinstance(optimizer, list):\n", + " for mode in optimizer:\n", + " print(f'Grid shape is {grid.shape}')\n", + " model = Solver(grid, equation, model, 'autograd').solve(lambda_bound=1, verbose=0, learning_rate=1e-3,\n", + " eps=1e-6, tmin=10, tmax=iterations,\n", + " use_cache=cache, cache_dir='../cache/',\n", + " patience=2,\n", + " save_always=cache, no_improvement_patience=100,\n", + " optimizer_mode=mode)\n", + " else:\n", + " print(f'Grid shape is {grid.shape}')\n", + " model = Solver(grid, equation, model, 
'autograd').solve(lambda_bound=1, verbose=0, learning_rate=1e-3,\n", + " eps=1e-6, tmin=10, tmax=iterations, use_cache=cache,\n", + " cache_dir='../cache/', patience=2,\n", + " save_always=cache, no_improvement_patience=100,\n", + " optimizer_mode='Adam')\n", + " end = time.time()\n", + " time_part = end - start\n", + "\n", + " x1 = torch.from_numpy(np.linspace(-1, 1, grid_res + 1))\n", + " t1 = torch.from_numpy(np.linspace(0, 1, grid_res + 1))\n", + " grid1 = torch.cartesian_prod(x1, t1).float()\n", + "\n", + " u_exact = exact(grid1)\n", + " error_rmse = torch.sqrt(torch.mean((u_exact - model(grid1)) ** 2))\n", + " end_loss, _ = Solution(grid=grid, equal_cls=equation, model=model,\n", + " mode='autograd', weak_form=None, lambda_operator=1, lambda_bound=1).evaluate()\n", + " exp_dict_list.append({'grid_res': grid_res, 'time': time_part, 'RMSE': error_rmse.detach().numpy(),\n", + " 'loss': end_loss.detach().numpy(), 'type': 'solver_burgers', 'cache': cache})\n", + "\n", + " print('Time taken {}= {}'.format(grid_res, end - start))\n", + " print('RMSE {}= {}'.format(grid_res, error_rmse))\n", + " print('loss {}= {}'.format(grid_res, end_loss))\n", + "\n", + " return exp_dict_list" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "id": "4db32dc5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Grid shape is torch.Size([441, 2])\n", + "before Model_prepare: torch.float32\n", + "after Model_prepare: torch.float32\n", + "Time taken 20= 11.31909990310669\n", + "RMSE 20= 0.7666455618134793\n", + "loss 20= tensor([0.0116], grad_fn=)\n" + ] + }, + { + "data": { + "text/plain": [ + "[{'grid_res': 20,\n", + " 'time': 11.31909990310669,\n", + " 'RMSE': array(0.76664556),\n", + " 'loss': array([0.01157083], dtype=float32),\n", + " 'type': 'solver_burgers',\n", + " 'cache': False}]" + ] + }, + "execution_count": 35, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def exact(grid):\n", + " mu = 0.01 / np.pi\n", + "\n", + " def f(y):\n", + " return np.exp(-np.cos(np.pi * y) / (2 * np.pi * mu))\n", + "\n", + " def integrand1(m, x, t):\n", + " return np.sin(np.pi * (x - m)) * f(x - m) * np.exp(-m ** 2 / (4 * mu * t))\n", + "\n", + " def integrand2(m, x, t):\n", + " return f(x - m) * np.exp(-m ** 2 / (4 * mu * t))\n", + "\n", + " def u(x, t):\n", + " if t == 0:\n", + " return -np.sin(np.pi * x)\n", + " else:\n", + " return -quad(integrand1, -np.inf, np.inf, args=(x, t))[0] / quad(integrand2, -np.inf, np.inf, args=(x, t))[0]\n", + "\n", + " solution = []\n", + " for point in grid:\n", + " solution.append(u(point[0].item(), point[1].item()))\n", + "\n", + " return torch.tensor(solution)\n", + "\n", + "solver_burgers(20, False, 'Adam', 2000)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7886f8e6", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}
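For reference, `solver_burgers` in the notebook above returns a list of result dictionaries with the keys `grid_res`, `time`, `RMSE`, `loss`, `type` and `cache`. A minimal sketch of how those results can be collected over several grid resolutions and tabulated with pandas; the resolutions and iteration budget here are illustrative choices, not values taken from the notebook:

```python
# Sketch: sweep solver_burgers over a few grid resolutions and tabulate the results.
# Assumes solver_burgers() and exact() from the notebook cells above are already defined;
# the resolutions and iteration count below are illustrative.
import pandas as pd

results = []
for grid_res in (10, 20, 30):
    # cache=False trains from scratch; passing 'Adam' takes the single-optimizer branch
    results.extend(solver_burgers(grid_res, False, 'Adam', 2000))

df = pd.DataFrame(results)
print(df[['grid_res', 'time', 'RMSE', 'loss']])
```

diff --git a/projects/pic/data/ac/AC.mat b/projects/pic/data/ac/AC.mat new file mode 100644 index 0000000..cd18945 Binary files /dev/null and b/projects/pic/data/ac/AC.mat differ diff --git 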
a/projects/pic/data/ac/ac_ann_pretrained.pickle b/projects/pic/data/ac/ac_ann_pretrained.pickle new file mode 100644 index 0000000..5f35f05 Binary files /dev/null and b/projects/pic/data/ac/ac_ann_pretrained.pickle differ diff --git a/projects/pic/data/ac/ac_data.npy b/projects/pic/data/ac/ac_data.npy new file mode 100644 index 0000000..bbf0abb Binary files /dev/null and b/projects/pic/data/ac/ac_data.npy differ diff --git a/projects/pic/data/kdv/data.csv b/projects/pic/data/kdv/data.csv new file mode 100644 index 0000000..8ae7959 --- /dev/null +++ b/projects/pic/data/kdv/data.csv @@ -0,0 +1,81 @@ +0.,-0.0019739670361023504,-0.006482645226773692,-0.011103392763830326,-0.015700143444398944,-0.020235802943216728,-0.02470395778230241,-0.02908901432257807,-0.03337537892496579,-0.037547630513554545,-0.04159453933875758,-0.045507815908937335,-0.04927920397573234,-0.05290044729078114,-0.05636348686561739,-0.059664981522786986,-0.06280622683491628,-0.06578869461806558,-0.06861385668829521,-0.07128318486166546,-0.07379815095423667,-0.07616127837011323,-0.07837848856315749,-0.08045548051635663,-0.08239791319189388,-0.08421144555195238,-0.08590173655871532,-0.08747444517436587,-0.08893523036108718,-0.09028981269053701,-0.09154353377389204,-0.09270123247737576,-0.09376773786557728,-0.09474787900308573,-0.09564648495449028,-0.09646838478438005,-0.09721840755734416,-0.09790092667096612,-0.09851884711905769,-0.09907517369741872,-0.09957292859520192,-0.10001513400156002,-0.10040481210564577,-0.10074498509661187,-0.10103867516361106,-0.10128851304740741,-0.1014960077630678,-0.101662872440963,-0.10179083695609314,-0.10188163118345839,-0.1019369849980589,-0.10195862827489481,-0.10194829088896627,-0.10190745563427599,-0.10183693429131115,-0.10173769883724228,-0.1016107322959189,-0.10145701769119056,-0.1012775380469068,-0.10107327638691715,-0.10084521573507116,-0.10059419245983903,-0.10032066112805348,-0.10002518535297272,-0.09970833551722458,-0.09937068200343686,-0.09901279519423736,-0.09863524547225389,-0.09823860322011427,-0.09782334909376511,-0.09738974000378742,-0.09693810792105838,-0.09646878908513885,-0.09598211973558969,-0.09547843611197177,-0.09495807445384592,-0.09442137100077302,-0.09386860184491955,-0.09329991110711766,-0.09271550848552544,-0.09211560677215941 
+0.,-0.0016827356743069305,-0.005699306484276352,-0.009862637312572349,-0.014046124125652351,-0.018211274499067086,-0.022347489387504985,-0.02643612766243583,-0.030458548195329407,-0.03439650771858643,-0.03823670027881357,-0.04196875865371007,-0.04558234267214657,-0.0490671121629937,-0.05241295908821429,-0.05561496929442621,-0.05867304502799478,-0.061587237982828696,-0.06435759985283668,-0.0669841823319274,-0.06946703711400962,-0.07180738806938917,-0.07401033070454667,-0.07608078364254015,-0.07802362197896917,-0.07984372080943328,-0.08154595522953201,-0.08313520033486485,-0.08461633122103139,-0.08599441462908822,-0.08727452825807816,-0.08846119390477515,-0.08955891822493882,-0.0905722078743288,-0.09150556950870478,-0.09236350978382633,-0.0931505353554531,-0.09387076710813631,-0.09452705480893615,-0.09512230903444867,-0.09565945472598184,-0.09614141682484363,-0.09657112027234205,-0.09695149000978504,-0.09728545097848058,-0.09757556704588378,-0.09782335901123775,-0.09803052884864064,-0.09819879386937155,-0.09832987138470968,-0.09842547870593413,-0.09848733314432402,-0.0985171520111585,-0.09851641692463536,-0.09848596320963444,-0.09842677374604705,-0.09833984187123594,-0.09822616092256384,-0.09808672423739354,-0.09792252515308772,-0.09773455700700917,-0.09752367055156447,-0.09729033986925088,-0.09703514043236126,-0.09675865422417143,-0.09646146322795723,-0.09614414942699451,-0.09580729480455906,-0.09545148134392677,-0.0950772030647288,-0.0946847299245921,-0.09427440147377768,-0.09384656138655564,-0.09340155333719617,-0.09293972099996942,-0.0924614080491455,-0.0919669581589946,-0.09145665471802553,-0.09093064758371086,-0.09038915127754803,-0.08983238340580608 +0.,-0.001397935068260118,-0.004934068651071725,-0.008650822211266718,-0.012430776899261295,-0.01623407285466089,-0.0200460373916657,-0.023845061975867752,-0.027609538072859083,-0.03131847422879924,-0.03495654223308178,-0.038511351174607515,-0.041970531169094105,-0.04532171233225922,-0.048552790783568354,-0.05165731910321899,-0.05463384109360421,-0.05748102400965097,-0.06019753510628628,-0.06278204163843709,-0.0652332108610304,-0.06755099945347037,-0.06973969664434265,-0.07180345948036179,-0.07374639807700152,-0.0755726225497355,-0.07728624301403741,-0.07889136958538093,-0.08039211237923978,-0.08179289992389929,-0.08309855186499249,-0.08431328016034283,-0.08544127642213398,-0.08648673226254962,-0.08745383929377341,-0.088346789127989,-0.08916977337738002,-0.08992666612530759,-0.09062026292401384,-0.0912533820372255,-0.0918288531359577,-0.09234950589122567,-0.09281816997404457,-0.0932376750554296,-0.09361085080639589,-0.09394019551263179,-0.09422724126299119,-0.09447367889068997,-0.0946812131903396,-0.09485154895655161,-0.09498639098393746,-0.09508744406710866,-0.09515641300067672,-0.0951947780345639,-0.09520339733632081,-0.095183264261791,-0.09513538204761275,-0.09506075393042429,-0.0949603831468639,-0.09483527293356983,-0.09468642652718033,-0.09451470857719622,-0.09432061213202708,-0.09410472413407142,-0.09386763778330219,-0.09360994627969234,-0.09333224282321483,-0.0930351206138426,-0.09271917285154863,-0.09238490651297732,-0.0920326042467503,-0.09166261293635179,-0.09127528344706533,-0.09087096664417459,-0.09045001339296316,-0.09001277455871466,-0.08955960100671273,-0.08909078320141925,-0.08860647658457328,-0.0881069003375254,-0.08759227671632984 
+0.,-0.0011189839837563134,-0.004186276636480559,-0.00746727725886084,-0.010853422827001373,-0.01430350728207466,-0.017798900532834033,-0.02131510702623893,-0.024827631209248812,-0.028312807538264033,-0.03175333923284621,-0.03513486576468753,-0.038443041785249585,-0.04166352194599397,-0.04478225971152079,-0.047791313623718995,-0.050687903746799344,-0.05346934850349645,-0.05613296631654499,-0.05867607560867959,-0.061095994802634904,-0.06339144557232525,-0.06556593029573408,-0.06762286315204634,-0.06956560809367067,-0.07139752907301569,-0.07312199004249005,-0.07474235495450234,-0.0762619877614612,-0.07768469435551198,-0.07901504219822143,-0.08025694061032061,-0.08141427349544951,-0.0824909247572481,-0.08349077829935636,-0.08441771802541423,-0.08527562783906169,-0.08606814073803118,-0.08679799912617772,-0.0874679308581138,-0.0880806723074831,-0.08863895984792922,-0.08914552985309583,-0.08960311869662657,-0.0900144627521651,-0.0903819960269634,-0.09070726143884915,-0.09099193869980983,-0.09123772013808258,-0.09144629808190458,-0.09161936485951298,-0.09175861279914492,-0.09186573422903759,-0.09194220784929678,-0.09198891398755085,-0.09200685605222476,-0.09199704676788481,-0.09196049885909727,-0.09189822505042845,-0.09181123806644462,-0.09170055063171208,-0.09156704081451754,-0.09141122009293151,-0.0912336864915935,-0.09103504404390113,-0.09081589678325205,-0.09057684874304386,-0.09031850395667423,-0.09004146645754077,-0.08974625577917493,-0.0894331669169813,-0.08910255384052929,-0.08875477436104322,-0.08839018628974758,-0.08800914743786672,-0.08761201561662509,-0.08719914863724709,-0.08677084382608694,-0.08632726206866947,-0.08586862703659613,-0.08539516546458017 +0.,-0.0008453497095434117,-0.0034553244919101373,-0.006311375457070504,-0.009313420346417502,-0.012418919892155229,-0.015605406494484166,-0.01884557811776108,-0.022112132726342756,-0.025378804837595324,-0.02862638263707249,-0.031838589987069246,-0.03499916026363336,-0.0380918268428126,-0.04110065362947476,-0.04401624401091277,-0.04683452884894447,-0.04955151322675397,-0.052163202227525435,-0.05466560093444299,-0.057054714430690774,-0.059328061410497526,-0.06148837663965373,-0.06353835005900975,-0.06548061819859864,-0.06731781758845339,-0.06905258475860701,-0.07068755623909255,-0.072225368559943,-0.07366922050145783,-0.07502343321014208,-0.07629162054432508,-0.07747736600572229,-0.0785842530960492,-0.07961586531702133,-0.08057578617035413,-0.08146759915776312,-0.08229470190361042,-0.08305978476731192,-0.08376548709616827,-0.08441445393594436,-0.08500933033240504,-0.08555276133131517,-0.0860473919784396,-0.08649586731954317,-0.08690055839567933,-0.08726301851919446,-0.08758491630493902,-0.08786793166874568,-0.08811374452644717,-0.08832403479387621,-0.08850048238686553,-0.08864476722124781,-0.08875836627534232,-0.08884218137007988,-0.08889722554682251,-0.08892452061005422,-0.08892508836425896,-0.08889995061392074,-0.08885012916352351,-0.08877664581755129,-0.0886803915923032,-0.08856189587028956,-0.08842176737213857,-0.0882606205827227,-0.08807906998691446,-0.08787773006958632,-0.08765721531561078,-0.08741814020986033,-0.08716103644814979,-0.08688621103755084,-0.08659402478631704,-0.0862848422059891,-0.08595902780810774,-0.08561694610421366,-0.08525896160584757,-0.0848854388245502,-0.08449668173933755,-0.0840928565475631,-0.08367419123725664,-0.0832409168460627 
+0.,-0.0005765463689142343,-0.0027406535697998653,-0.005182531289462095,-0.007810163672629213,-0.010579684102660256,-0.013464910414488883,-0.016435814638377795,-0.019462368804589722,-0.022515781651465366,-0.025574979787076894,-0.028621825329475506,-0.03163818442474857,-0.03460592321898347,-0.03750726899718959,-0.04033140863418387,-0.04307301815782038,-0.04572682465839316,-0.04828755522619628,-0.05074993695152378,-0.05310869692466971,-0.05536018272293,-0.05750638059054207,-0.0595492747747366,-0.061490793040938574,-0.06333286315457295,-0.06507741288106472,-0.06672636998583885,-0.06828166223432032,-0.06974589675730584,-0.07112315424592841,-0.07241676024423865,-0.07363000513108593,-0.07476617928531963,-0.0758285730857891,-0.0768204769113437,-0.07774518114083283,-0.07860585372814088,-0.07940513411602497,-0.08014557504457458,-0.08082973219844419,-0.08146016126228821,-0.08203941792076107,-0.08257005785851719,-0.083054636760211,-0.083495464019297,-0.08389410292804604,-0.0842522110327628,-0.08457145589479499,-0.08485350507549033,-0.08510002613619655,-0.08531268663826137,-0.08549315414303252,-0.08564290374345451,-0.08576285810006276,-0.08585403947149574,-0.08591747833783556,-0.08595420517916429,-0.08596525047556408,-0.08595164470711705,-0.0859144183539053,-0.08585447491617085,-0.08577236116464768,-0.08566869613176836,-0.08554410437378786,-0.08539921044696118,-0.08523463890754326,-0.08505101431178914,-0.08484896121595377,-0.08462902308823404,-0.08439151861913098,-0.08413681520851317,-0.08386528382274608,-0.08357729542819523,-0.08327322099122604,-0.08295343147820407,-0.08261829785549475,-0.08226813054917811,-0.08190310094229404,-0.08152344116282127,-0.0811293863726999 +0.,-0.00031213325129466935,-0.0020417506970298884,-0.004080199015764125,-0.006343081216768771,-0.008785203125502176,-0.011376793416543641,-0.014085178627653903,-0.016877685296593715,-0.019723070500124232,-0.02259845268706429,-0.025483885887963326,-0.028359422852030535,-0.03120511632847512,-0.03400140971357309,-0.03673611184262304,-0.03940267810980384,-0.04199459278665877,-0.04450534014473114,-0.04692840445556422,-0.04925726999070132,-0.05148714490558614,-0.053619285896008784,-0.05565498997093673,-0.057595494699856316,-0.059442037652253865,-0.0611958563976157,-0.06285818850542815,-0.06443027154517754,-0.0659141363902884,-0.06731362911968178,-0.06863179408450706,-0.06987163579224667,-0.07103615875038302,-0.07212836746639857,-0.07315126644777574,-0.07410786020199697,-0.07500109270079074,-0.07583355361112402,-0.0766077109641923,-0.07732603304784551,-0.07799098814993352,-0.07860504455830623,-0.07917067056081352,-0.07969033444530531,-0.08016628528035653,-0.0806000959383834,-0.08099341293020038,-0.08134789152466695,-0.0816651869906426,-0.0819469545969868,-0.08219484961255905,-0.08241052730621884,-0.08259546072929477,-0.08275059273907977,-0.08287695440000477,-0.08297558446739163,-0.08304752169656218,-0.08309380484283832,-0.08311547266154191,-0.08311356390799482,-0.08308899410833694,-0.0830423269126887,-0.0829741912835441,-0.0828852214707425,-0.0827760517241233,-0.08264731629352588,-0.08249964942878965,-0.08233368537975394,-0.08214997900115982,-0.08194886034383139,-0.08173070315300236,-0.08149588460506339,-0.08124478187640523,-0.0809777721434185,-0.08069523258249396,-0.08039754037002227,-0.08008501217625326,-0.07975782444746908,-0.07941621327372539,-0.07906041776108208 
+0.,-0.0000517131638285203,-0.0013581463627927166,-0.0030038709814081237,-0.004911634021052078,-0.007034908473095571,-0.009340461164040822,-0.011793053369668818,-0.014357446365760546,-0.01700001958492582,-0.019696136710534946,-0.02242409707984237,-0.025162193609608495,-0.027888719216593756,-0.030582385885471935,-0.0332296627616844,-0.03582281863411951,-0.03835412993530336,-0.040815873097762065,-0.043200324554021716,-0.045499760736608406,-0.04770828190086978,-0.04982643407039221,-0.0518548453769814,-0.053794081667904586,-0.055644708790428994,-0.05740729259182185,-0.05908239891935039,-0.06067059362028184,-0.06217334662550562,-0.06359427522268596,-0.06493614966384284,-0.06620169580828819,-0.06739363951533392,-0.06851470664429198,-0.06956762305447425,-0.07055511460519272,-0.07147990695532154,-0.07234454114183626,-0.07315140238319973,-0.0739028735321763,-0.07460133744153022,-0.07524917696402585,-0.07584877495242745,-0.07640251425949936,-0.07691258495380457,-0.0773805690990216,-0.07780810220787336,-0.07819682732280615,-0.07854838748626633,-0.07886442574070021,-0.07914658512855419,-0.07939650869227459,-0.07961566729178304,-0.0798050233473632,-0.07996561632206806,-0.08009849285025922,-0.08020469956629828,-0.08028528310454688,-0.08034129009936664,-0.08037376718511917,-0.0803836414612771,-0.08037149295461746,-0.08033796017869436,-0.0802836867017779,-0.08020931609213824,-0.08011549191804547,-0.08000285774776969,-0.07987205714958102,-0.07972365598251109,-0.07955799533840183,-0.07937545506281385,-0.07917641829850317,-0.07896126818822576,-0.07873038787473757,-0.07848416050079464,-0.07822296920915293,-0.07794713671334937,-0.07765684440256618,-0.07735233215067165,-0.07703384282720896
+0.,0.0002050691970409376,-0.0006894129209278741,-0.001953075942298855,-0.0035153142104823874,-0.005328258483807942,-0.007355342436394132,-0.009558842010913973,-0.0119010331500405,-0.014345991497859624,-0.01686737933256032,-0.01944179438578137,-0.02204582299237961,-0.024656051487211898,-0.027249512628460183,-0.029811374121187527,-0.032332751999164244,-0.03480474962335828,-0.03721847035473762,-0.039565017554270186,-0.04183549458292395,-0.04402292513784267,-0.04612716336221684,-0.04814818677261877,-0.05008590786728974,-0.05194023914447099,-0.05371109310240378,-0.055398382239329365,-0.05700201905348899,-0.058522927764708355,-0.05996450266378637,-0.061329246968334965,-0.06261961508300569,-0.06383806141245006,-0.06498704036131965,-0.066069006334266,-0.06708641373594065,-0.06804177555884944,-0.06893758535477738,-0.06977614742284059,-0.0705597611393948,-0.07129072588079562,-0.07197134102339878,-0.07260390594355993,-0.07319072001763477,-0.07373391563945624,-0.07423508368303659,-0.07469584870455526,-0.07511784159009159,-0.07550269322572505,-0.075852034497535,-0.07616749629160088,-0.07645070949400211,-0.07670314262913829,-0.07692577705422501,-0.0771196602281692,-0.07728584627246456,-0.07742538930860489,-0.07753934345808398,-0.07762876284239559,-0.07769470158303349,-0.07773809790529104,-0.07775954771501661,-0.07775969870080192,-0.0777392033771133,-0.07769871425841716,-0.07763888385917986,-0.07756036469386776,-0.07746380927694724,-0.07734979409273143,-0.0772186709576059,-0.07707082557394179,-0.07690664680869096,-0.07672652352880521,-0.07653084460123637,-0.07631999889293627,-0.07609437527085672,-0.07585430229246304,-0.07559996617045239,-0.07533161038461925,-0.07504948138772148
+0.,0.00045852885399268943,-0.000035162806719566406,-0.0009273774048138871,-0.002153643461186823,-0.0036647368665139466,-0.005420887727813197,-0.007381966203194316,-0.009507842450767055,-0.011760361955088432,-0.014111538887927698,-0.016536322121102613,-0.01900964430839535,-0.02150643810358811,-0.024002108899625892,-0.026480561114665666,-0.028931791690903923,-0.03134576545844305,-0.033712447247385445,-0.03602180188783347,-0.03826379420988953,-0.04043040250724008,-0.04252080775554871,-0.04453435501396881,-0.04647032169903125,-0.04832798522726687,-0.0501066230152065,-0.051805512479380985,-0.05342393103632117,-0.05496227233765943,-0.05642371344189389,-0.057810497565964805,-0.059124814821769045,-0.060368855321203445,-0.06154480917616489,-0.06265486649855022,-0.06370121740025632,-0.06468616782784659,-0.06561216498766723,-0.0664814341492728,-0.06729619316751558,-0.06805865989724781,-0.06877105219332179,-0.06943558791058976,-0.070054484903904,-0.07062981921653486,-0.07116319015774077,-0.071656211372602,-0.0721105016646514,-0.07252767983742178,-0.07290936469444598,-0.07325717503925686,-0.07357272967538724,-0.07385749465260809,-0.07411246964568535,-0.07433870971106175,-0.07453727606982893,-0.07470922994307849,-0.07485563255190213,-0.07497754511739145,-0.0750760288606381,-0.07515203268997184,-0.07520616789717283,-0.07523909097300967,-0.07525146300904044,-0.0752439450968233,-0.07521719832791636,-0.07517188379387774,-0.07510866258626556,-0.07502812143868763,-0.07493062257776627,-0.07481655732092815,-0.07468632001891044,-0.07454030502245024,-0.07437890668228464,-0.07420251934915081,-0.07401153737378588,-0.0738062949594341,-0.07358698302311534,-0.07335384847361628,-0.07310714116762411
+0.,0.000708942137253008,0.0006049532318426397,0.00007362801896750493,-0.0008261714853856914,-0.002043851264252998,-0.0035365678685283066,-0.005261864771533797,-0.007177285446591652,-0.009242518554491808,-0.011427983355153788,-0.013707032236264355,-0.016052996693560143,-0.018439208222777805,-0.020839496362355752,-0.02323654029005981,-0.02561925132334269,-0.02797649006361303,-0.030297117112279486,-0.03256999307075068,-0.034783978540435266,-0.0369300373712842,-0.03900669600524877,-0.04101268509279754,-0.04294666512501375,-0.0448072965929806,-0.04659323998778128,-0.04830315580049901,-0.04993570452221699,-0.05149076428607336,-0.05297130065061284,-0.05437930383252807,-0.055716706778915075,-0.056985442436869835,-0.05818744375348838,-0.059324643675866705,-0.06039897515110081,-0.06141254267138303,-0.06236774822979248,-0.06326673995151895,-0.06411165612009609,-0.0649046350190575,-0.06564781493193683,-0.06634333414226772,-0.06699333093358377,-0.06759982632028982,-0.068164427676209,-0.06868873778436349,-0.06917436344306578,-0.0696229114506284,-0.07003598860536385,-0.07041520170558466,-0.07076215754960334,-0.07107831957788784,-0.07136470516930193,-0.07162237658397184,-0.07185240175946389,-0.0720558486333444,-0.07223378514317971,-0.07238727922653615,-0.07251739882098004,-0.07262510307957978,-0.07271101819087307,-0.07277580907824542,-0.07282014504453023,-0.07284469539256092,-0.07285012942517088,-0.07283711644519354,-0.0728063257554623,-0.07275835396578904,-0.07269357340048097,-0.0726123807522081,-0.07251517561704181,-0.07240235759105339,-0.07227432627031415,-0.07213148125089543,-0.07197422212886855,-0.07180288855614296,-0.07161767603460827,-0.07141883472647517,-0.07120661771449749
+0.,0.0009565482082669966,0.0012312479108689048,0.00105031225997561,0.0004675254783971755,-0.00046513183384721754,-0.00170187267659509,-0.0031979924211091443,-0.0049087864386520835,-0.0067918595492716055,-0.008816089160890855,-0.010953283152004677,-0.01317522397464518,-0.015453694080844484,-0.017760998286722444,-0.020078628467849496,-0.022394443578070113,-0.0246962340405356,-0.02697179027839728,-0.029208902714806445,-0.03139536177291442,-0.033521147616029916,-0.03558415070988351,-0.03758250523085243,-0.0395142727844079,-0.0413775149760211,-0.04317029341116321,-0.044890669695305425,-0.04653670543391894,-0.04810777819310279,-0.049606647728588485,-0.05103505822181778,-0.05239469254697549,-0.05368723357824641,-0.05491436418981535,-0.05607776725586711,-0.05717912565058649,-0.05822034797148765,-0.05920379211965748,-0.060131530955651615,-0.0610056251363405,-0.06182813531859456,-0.06260112215928428,-0.06332664631528007,-0.0640067684434524,-0.0646434558471719,-0.06523832359683306,-0.06579296366577522,-0.066308970927703,-0.06678794025632098,-0.06723146652533375,-0.0676411446084459,-0.06801856937936207,-0.06836520153842153,-0.06868207556023291,-0.06897026051925341,-0.06923083069087195,-0.0694648603504775,-0.06967342377345899,-0.06985759523520535,-0.07001844901110552,-0.07015695406489261,-0.07027375099610175,-0.07036951279463323,-0.0704449166122463,-0.07050063960070023,-0.07053735891175428,-0.0705557516971677,-0.07055649510869977,-0.07054019526200604,-0.0705072342677645,-0.07045801395622325,-0.07039293893252309,-0.07031241380180478,-0.07021684316920905,-0.0701066316398767,-0.06998218381894847,-0.0698438446106794,-0.06969181398151307,-0.06952634517335494,-0.06934769431909125
+0.,0.0012015507798186509,0.0018439990639612031,0.0020030173329277827,0.001727846103400254,0.0010718701401769897,0.00008369040716639086,-0.0011898184013662406,-0.002701781591155603,-0.004407792683980113,-0.006275240036685644,-0.008274438597203814,-0.01037567348722443,-0.012549229828437317,-0.01476593846443839,-0.01700614170943531,-0.019256679190362315,-0.02150430495271147,-0.02373577304197487,-0.025937837503644562,-0.028097252383212635,-0.030203044701105304,-0.0322524874003536,-0.034243135998860696,-0.03617247113584502,-0.03803797345052497,-0.039837123582118925,-0.041567402169845295,-0.04322628985292248,-0.044812678482688835,-0.046329127676233075,-0.04777714250504735,-0.04915816282175509,-0.05047362847897972,-0.05172497932934467,-0.052913655225473345,-0.05404109601998919,-0.055109019942975435,-0.0561197419188893,-0.05707526141607869,-0.057977563401799035,-0.058828632843305765,-0.05963045470785435,-0.06038501396270021,-0.061094295575098815,-0.06176021445075094,-0.062384392993098435,-0.06296841242095179,-0.06351385576666364,-0.06402230606258666,-0.06449534634107344,-0.06493455963447667,-0.06534152897514897,-0.0657177121961169,-0.06606416026499923,-0.06638194868558188,-0.06667215769672044,-0.06693586753727057,-0.06717415844608784,-0.0673881106620279,-0.06757880442394636,-0.06774721807652241,-0.0678940061484543,-0.06801984933344227,-0.06812543227319906,-0.06821143960943742,-0.06827855598387006,-0.06832746603820976,-0.06835885441416922,-0.06837333636594925,-0.06837130348129888,-0.06835316249243543,-0.06831932277937235,-0.06827019372212309,-0.06820618470070106,-0.06812770509511974,-0.06803516428539257,-0.06792891223310771,-0.06780915324914509,-0.06767614348287822,-0.06753014194294049
+0.,0.0014441194395856587,0.0024434514706921486,0.0029320570231625535,0.002955165491904008,0.0025675836390090952,0.001820597593381362,0.0007631746868646407,-0.0005557177486972371,-0.002089733987716984,-0.003804825855530908,-0.005669866521985756,-0.007653695060565243,-0.009725150544753106,-0.011853640186134932,-0.014018394283975394,-0.016205265942160815,-0.018400006365709605,-0.020588366759640195,-0.022756098328970988,-0.024888952278720417,-0.026975032809323583,-0.02901101369404969,-0.03099388948379779,-0.03292057764490448,-0.03478799564370633,-0.03659306094653994,-0.03833269101974188,-0.04000380332964873,-0.041604818760599605,-0.043138102418959885,-0.044604927149835445,-0.04600649679306866,-0.04734401518850191,-0.048618686175977575,-0.04983171359533803,-0.05098430128642565,-0.05207798260363175,-0.05311503060073666,-0.05409737321818319,-0.05502692166230969,-0.055905587139454545,-0.056735280855956125,-0.05751791401815279,-0.058255397832382935,-0.05894959611422877,-0.05960213823941534,-0.060214594729918775,-0.06078853686244453,-0.06132553591369808,-0.06182716316038489,-0.0622949898792104,-0.0627305873468801,-0.06313541040502413,-0.06351052591637617,-0.06385701543343918,-0.06417596478854061,-0.06446845981400795,-0.06473558634216865,-0.06497843020535017,-0.06519807723587999,-0.06539551473377604,-0.06557141067847004,-0.06572645310829002,-0.06586133379948333,-0.06597674452829734,-0.06607337707097939,-0.06615192320377683,-0.06621307470293707,-0.06625745559680445,-0.06628546664240141,-0.06629751924056175,-0.06629402731426548,-0.06627540478649259,-0.06624206558022305,-0.06619442361843689,-0.06613289282411407,-0.06605782802222028,-0.06596943774753211,-0.06586798088662767,-0.0657537191515449
+0.,0.0016843912172673085,0.0030298185422140914,0.003837718449099731,0.004149836611968504,0.004022419267510648,0.0035093100443769577,0.002661492141496501,0.0015299487577983472,0.00016289336365346574,-0.0014042415252933692,-0.003138938008058683,-0.005008639942342253,-0.0069807911858438725,-0.009023425230086691,-0.011114697689545434,-0.013239507704979628,-0.015382636905558527,-0.0175288669204514,-0.019662979378827487,-0.02176975590985606,-0.023836407987583942,-0.02585902846239925,-0.027834068480394346,-0.02975789999726156,-0.03162689496869321,-0.03343742535038163,-0.03518586309801915,-0.03686858016729808,-0.03848354111462726,-0.04003292212551207,-0.041517770657840175,-0.04293906150217973,-0.044297769449098885,-0.0455948692891658,-0.04683133581294861,-0.0480081438110155,-0.04912664721596477,-0.05018907830637913,-0.05119729534988797,-0.05215313771115983,-0.05305844475486326,-0.05391505584566682,-0.05472481034823904,-0.0554895476272485,-0.05621108170880691,-0.056891048582262675,-0.05753100813267491,-0.05813251996894253,-0.05869714369996446,-0.05922643893463962,-0.059721965281866935,-0.06018528235054531,-0.06061784186951081,-0.06102072600319689,-0.0613950219765898,-0.061741820849851185,-0.0620622136831427,-0.062357291536625975,-0.06262814547046265,-0.06287586654481439,-0.06310145059342281,-0.06330557857123618,-0.06348894550566685,-0.06365224995572559,-0.06379619048042323,-0.06392146563877056,-0.06402877398977838,-0.06411881409245754,-0.06419221838777808,-0.06424939649562081,-0.06429076425444809,-0.0643167399007066,-0.06432774167084304,-0.06432418780130404,-0.0643064965285363,-0.06427508608898647,-0.06423031597807645,-0.0641723988334014,-0.06410159611065615,-0.06401817205520527
+0.,0.0019224720619184764,0.003603284008996091,0.004720263623086822,0.005312191721491225,0.005436770588624027,0.0051502751366078805,0.004505628363919831,0.0035557532690369265,0.0023506601659857677,0.0009271140951266398,-0.0006810262155472879,-0.0024398597730075034,-0.004315485584225511,-0.00627461288562462,-0.008294359699741495,-0.010358703509252297,-0.012451489350465576,-0.014556562259689902,-0.01665776727323381,-0.01873894942740588,-0.02078645732577157,-0.022795821031080875,-0.0247629657123892,-0.026683735340676782,-0.028553973886923833,-0.03036952532211057,-0.03212623361721722,-0.03381994274322399,-0.03544817545476432,-0.03701292456643898,-0.038515018941043895,-0.03995521123541468,-0.041334254106386955,-0.042652900210796374,-0.04391190220547854,-0.04511201274726909,-0.04625441176071835,-0.047341291833180185,-0.04837444340428174,-0.04935563590573517,-0.050286638769252615,-0.05116922142654623,-0.052005153309328165,-0.05279620384931058,-0.05354413857582338,-0.05425059973467388,-0.05491713663583397,-0.05554529731006584,-0.0561366297881317,-0.056692682100793735,-0.05721500227881413,-0.0577051383529551,-0.05816453882015777,-0.058594300557303776,-0.05899551608999435,-0.059369281344939955,-0.059716692248851045,-0.06003884472843804,-0.0603368347104114,-0.06061175812148157,-0.060864618911383415,-0.06109611053840957,-0.06130693466722522,-0.06149779629157199,-0.0616694004051915,-0.06182245200182534,-0.061957656075215125,-0.06207571761910249,-0.0621772771284371,-0.06226275278081452,-0.062332564623809435,-0.06238713498035542,-0.06242688617338606,-0.06245224052583495,-0.06246362036063567,-0.062461448000721815,-0.062446087420056035,-0.06241775523730397,-0.062376715311565045,-0.06232323425392739
+0.,0.002158438340001228,0.004164003599578568,0.005579931002381911,0.006442543784617902,0.006811015448910282,0.006743927711813309,0.0062960680494156705,0.005522223937806053,0.004474132284865324,0.003189839256353678,0.0017044946442405183,0.00005329441142124468,-0.0017285654792087545,-0.0036065190093734155,-0.005556683444914135,-0.007562146649823886,-0.009605849759926344,-0.011670733911045198,-0.013739740239004115,-0.01579580987962679,-0.01782445817583154,-0.01982067042065285,-0.02177986309428283,-0.023697369567434824,-0.02556852321082216,-0.027388657395158184,-0.029153105491156225,-0.03085720086952962,-0.03249803889212409,-0.03407743451079,-0.03559600473568432,-0.03705428695481114,-0.038452818556174545,-0.039792136927778625,-0.04107277945762744,-0.0422952835337251,-0.04346066044388777,-0.04457106415571861,-0.04562821911431766,-0.04663382671554955,-0.04758958835527892,-0.04849720542937041,-0.04935837933368865,-0.05017481146409829,-0.050948220136859215,-0.051680253498022215,-0.052372450345911225,-0.05302634722434944,-0.05364348067716008,-0.054225387248166335,-0.054773603481191425,-0.055289665920058545,-0.05577501971264495,-0.05623077586291609,-0.05665803182957269,-0.05705788804893444,-0.057431444957320985,-0.057779802991051975,-0.058104062586447075,-0.05840532417982594,-0.058684599422979515,-0.058942593808281595,-0.05918001528960828,-0.05939757495122143,-0.05959598387738293,-0.059775953152354635,-0.05993819386039841,-0.06008341708577615,-0.06021227102164436,-0.06032518209936295,-0.06042257434966993,-0.06050487395762737,-0.060572507108297355,-0.060625899986741985,-0.06066547877802334,-0.06069166966720351,-0.06070484091586741,-0.06070521300124902,-0.06069305202269359,-0.06066862679207002
+0.,0.0023923383002275324,0.004712106706523375,0.00641693702638056,0.00754118787074572,0.008145517284351055,0.008290691306146595,0.00803328735336363,0.0074298828432334515,0.0065338717256199575,0.0053845303493647596,0.0040182503047151204,0.0024714731438261724,0.0007806404188530442,-0.0010184551162918578,-0.0029009665288998185,-0.004849123827472827,-0.006844996640239423,-0.008870654595428157,-0.010908167321267558,-0.012939604445986188,-0.01494967740728624,-0.016932844623336128,-0.018884031028871304,-0.02079807663211565,-0.02266982144129302,-0.02449410546462727,-0.02626576871034226,-0.027979651186661847,-0.02963243515019002,-0.03122576315445197,-0.03276004704713199,-0.03423561575900794,-0.03565279822085766,-0.03701192336345903,-0.03831332011758989,-0.0395573174140281,-0.040744763230496575,-0.04187777397290408,-0.04295800991292285,-0.04398710629501866,-0.044966698363657325,-0.04589842136330462,-0.046783910538426335,-0.04762480113348827,-0.04842276552543487,-0.04917945740480489,-0.0498964051230269,-0.0505751338294151,-0.05121716867328366,-0.05182403480394677,-0.05239725737071862,-0.05293836152291339,-0.0534487889437661,-0.053929664182612635,-0.054382089268077375,-0.0548071687934872,-0.05520600735216899,-0.0555797095374496,-0.05592937994265593,-0.05625612316111483,-0.05656095813645256,-0.05684460192866405,-0.05710776843665579,-0.05735117449489863,-0.05757553693786337,-0.05778157260002084,-0.057969998315841875,-0.0581415309197973,-0.05829682595036242,-0.05843631778985153,-0.058560434228871875,-0.058669605092968925,-0.05876426020768814,-0.05884482939857501,-0.05891174249117497,-0.05896542931103352,-0.059006262217438425,-0.059034465422879685,-0.05905030710650168,-0.05905405811883674
+0.,0.0026241935537964674,0.005247698004806534,0.007231477609434913,0.008608402517790919,0.009440626393924775,0.00979097935015082,0.009717755030810366,0.009279247080244732,0.0085304376798343,0.007511782914025185,0.006260867418702249,0.004815329470186563,0.0032128073447991847,0.0014902725103243288,-0.0003265001642132606,-0.002218914308413207,-0.004168200127545885,-0.0061555878268816765,-0.008162307611690948,-0.010169589687244095,-0.012161370678765066,-0.014131599895490059,-0.016074727720163016,-0.017985117884446934,-0.019857134120004772,-0.021685140158499507,-0.023463499731594114,-0.025186576570951556,-0.0268506539897436,-0.02845720756190747,-0.030006450607940553,-0.031498510357069735,-0.03293351403852189,-0.03431158888152392,-0.035632862115302685,-0.03689746096908508,-0.038106075390259065,-0.03926078526678452,-0.04036318850460699,-0.04141485606754252,-0.042417358919407205,-0.04337226802401711,-0.04428115434518831,-0.045145588846736875,-0.0459671992291441,-0.04674764437170177,-0.04748844224472076,-0.04819110669637905,-0.048857151574854645,-0.04948809072832552,-0.0500854380049697,-0.05065070725296517,-0.051185336576598135,-0.051690463492312475,-0.05216719423980701,-0.0526166372211425,-0.053039900838379685,-0.053438093493579295,-0.053812323588802085,-0.054163699526108785,-0.05449324713439135,-0.05480169457752623,-0.05508976135919974,-0.05535816972775222,-0.055607641931524025,-0.05583890021885551,-0.056052666838087025,-0.05624966403755893,-0.056430554351079625,-0.05659577981021246,-0.056745771744875905,-0.05688096340225179,-0.05700178802952192,-0.057108678873868135,-0.05720206918247225,-0.057282392202516095,-0.0573500242020803,-0.05740519300475079,-0.057448168711878565,-0.05747922405351488
+0.,0.0028540005451844212,0.0057708590516909325,0.008023729615014462,0.009644451077094952,0.010696681193917892,0.011245196348188635,0.011349933553990832,0.011070829825408145,0.01046438754673902,0.00957219262105021,0.008432974505205987,0.00708551986459838,0.0055686153646194,0.003920363677631591,0.0021674316607935245,0.00032921088363891673,-0.0015747212038786096,-0.003524787151805444,-0.005501409510187957,-0.0074850108290725435,-0.009458781743012226,-0.01141618008341218,-0.013351198519380245,-0.015257741434927148,-0.01712971321406358,-0.018961018240800247,-0.020745560899147857,-0.022477245573117095,-0.02415197066421505,-0.025771050137855436,-0.027334505365182782,-0.028842268571007787,-0.030294271980141122,-0.0316904478173935,-0.03303072830757558,-0.03431504567549807,-0.03554393706891578,-0.036719446886430565,-0.03784311246155915,-0.0389164423334949,-0.03994094504143122,-0.04091812912456148,-0.04184950312207906,-0.042736575573177345,-0.043580930752914555,-0.04438423237323178,-0.045147988089844055,-0.045873700543825815,-0.04656287237625153,-0.04721700622819564,-0.04783760474073263,-0.048426170554936935,-0.04898413808281406,-0.049512657232938435,-0.05001283810154839,-0.05048579255548407,-0.0509326324615856,-0.0513544696866931,-0.05175241609764672,-0.052127583561286565,-0.05248100438879813,-0.052813417386860385,-0.053125547327683085,-0.05341812154117644,-0.053691867357250685,-0.053947512105816045,-0.054185783116782745,-0.05440740772006101,-0.0546130550977334,-0.054803174629993845,-0.05497820096832403,-0.055138570565570215,-0.05528471987457863,-0.05541708534819551,-0.05553610343926712,-0.05564221060063969,-0.05573578682130584,-0.05581706341092504,-0.055886312238441655,-0.055943807757383446
+0.,0.0030817319062907064,0.00628164994359512,0.00879385237779953,0.010649583079587934,0.01191400947985709,0.012653739052546138,0.012930280212835293,0.01280514137590477,0.012336277919474902,0.011566356211033186,0.010535202846057457,0.00928270508785077,0.007848750199716158,0.006272525076299707,0.004581552945713073,0.0027959897035648267,0.0009361890127227816,-0.0009774954639452616,-0.0029247100635714845,-0.004885101123288087,-0.006841141829729166,-0.008785816026870008,-0.010712675348302713,-0.012615181597146521,-0.01448679657652064,-0.016320982089544293,-0.0181111999393367,-0.019850911929017067,-0.02153564544586625,-0.023166558168169864,-0.02474348603556669,-0.026266172904472872,-0.027734362631304535,-0.02914779907247782,-0.03050622608440886,-0.03180938752351379,-0.03305767291742993,-0.034253092188034505,-0.035397123874325685,-0.03649121593117634,-0.03753681631345938,-0.03853537297604769,-0.039488333873814175,-0.04039714696163172,-0.041263354327491765,-0.042088624159173165,-0.04287445386478679,-0.043622334972717975,-0.044333759011352085,-0.045010217509074475,-0.045653201994270505,-0.046264203995325535,-0.04684465411933371,-0.04739571409518215,-0.04791849752541351,-0.04841411940199886,-0.04888369471690927,-0.049328338462115795,-0.04974916562958952,-0.05014729121130148,-0.05052375360131608,-0.05087930179066634,-0.0512146654878821,-0.051530576776244445,-0.05182776773903444,-0.052106970459533185,-0.052368917021021755,-0.05261433950678123,-0.052843913403067245,-0.053058095139176545,-0.05325732247328558,-0.05344203485087964,-0.053612671717443995,-0.05376967251846394,-0.05391347669942476,-0.054044523705811745,-0.054163197060696735,-0.05426973143391381,-0.05436440031047891,-0.05444747971457188
+0.,0.0033073379247008433,0.006780110877332813,0.0095419891391409,0.011624035544154757,0.013092929642672339,0.014016997602427923,0.014459248189284046,0.01448269016910324,0.01414666555708376,0.013494872423995982,0.01256818737742055,0.01140755104455819,0.010053904052609624,0.008547471076998266,0.006916596446925861,0.005182169827423681,0.003365289985770517,0.0014870556892451394,-0.000431434294873659,-0.002369081199307105,-0.0043076690184173485,-0.006239724952529882,-0.008158376112508268,-0.010056658320096066,-0.011927607397036797,-0.013764259165074,-0.01555964944595121,-0.017306814061411952,-0.019000923143343194,-0.020642983352620686,-0.022232651652733124,-0.023769490104069955,-0.0252530607670206,-0.02668292570197451,-0.02805864696932111,-0.029379786629449844,-0.030646591716120372,-0.03186103867040111,-0.03302454899833002,-0.03413851189405386,-0.03520431655171946,-0.036223352165473595,-0.03719700792946305,-0.03812667303783464,-0.039013848616347705,-0.03986020697080315,-0.04066723532890297,-0.041436414200868895,-0.04216922409692268,-0.04286714552728608,-0.04353165900218082,-0.04416424503182866,-0.044766330306228675,-0.04533908780582461,-0.04588363429351506,-0.046401087551047325,-0.046892565360168685,-0.04735918550262642,-0.04780206576016783,-0.04822232391454019,-0.048621004046796195,-0.048998864876486935,-0.04935664072036881,-0.049695068091054235,-0.05001488350115563,-0.050316823463285415,-0.05060162449005601,-0.05087002309407984,-0.05112270072467591,-0.0513601205618165,-0.051582723258447705,-0.051790951042680555,-0.051985246142626075,-0.05216605078639533,-0.052333807202099325,-0.05248895761784913,-0.05263188890503874,-0.05276283896690554,-0.052882082756203235,-0.05298989771825666
+0.,0.003530747868360968,0.007266263707156346,0.010268268475992815,0.012568034264270925,0.014233751858804617,0.015335356632892413,0.015937287598305573,0.016103983766815346,0.015896108320205323,0.015358342891776858,0.014532567543457154,0.013460729602078947,0.012184776394474958,0.010745924489239273,0.009173304409429144,0.007488509358677656,0.005713352442240343,0.0038696467653727257,0.0019792054333303403,0.00006384155136870701,-0.0018575676431956596,-0.003777109898310674,-0.005687504144500138,-0.007581376649339034,-0.009451353680402315,-0.011290061505264947,-0.013090126391501894,-0.014844174606688111,-0.016547032643276546,-0.018199561360751078,-0.019801245136840565,-0.0213514707421132,-0.022849624947137166,-0.02429509452248066,-0.025687266238711856,-0.02702552686639895,-0.028309986016342752,-0.029542587627289518,-0.030724697916575425,-0.03185764912353985,-0.032942773487522205,-0.03398140324786189,-0.034974870643898315,-0.03592450791497087,-0.036831776435121005,-0.0376983522691133,-0.038525712531378555,-0.039315326808343875,-0.040068664686436405,-0.04078719575208325,-0.04147238959171153,-0.04212571579174838,-0.042748597013132,-0.043342216922115545,-0.0439076951002741,-0.044446151788051515,-0.044958707225891675,-0.045446481654238425,-0.04591059531353564,-0.04635216844422718,-0.046772250421978045,-0.04717160924166841,-0.047550983504342705,-0.04791111383210958,-0.048252740847077675,-0.048576605171355655,-0.048883447427052175,-0.0491740082362759,-0.0494489746733411,-0.04970881637163006,-0.04995397666993165,-0.05018490037203613,-0.05040203228173374,-0.050605817202814755,-0.050796699939069435,-0.05097512529428802,-0.05114148330353638,-0.05129601497583297,-0.0514389965866066,-0.051570706856222065
+0.,0.0037518714954120082,0.00774011347815377,0.010972805707442117,0.013481795091128318,0.015336779278200801,0.01660919638198719,0.017364846526026767,0.017669529833858798,0.017585166097014465,0.017157373016669104,0.016428988132307202,0.01544291938780377,0.014242074727033814,0.01286861729049056,0.011352429265265473,0.009715777497819177,0.007981159015103347,0.006171070844069647,0.004308010011669759,0.002414473544855346,0.0005099722549289074,-0.001397159186513521,-0.0032992476966378865,-0.005188526238746254,-0.007057227776140823,-0.008897585272123695,-0.010701831689996999,-0.012462199993062854,-0.01417318650320154,-0.01583551143880867,-0.01744849291484727,-0.01901134884964312,-0.020523297161521997,-0.02198355576880969,-0.023391342589831984,-0.024745875542914655,-0.026047131829497028,-0.027297023846384882,-0.028496864248249244,-0.029647930106923466,-0.030751498494240928,-0.03180884648203501,-0.03282125114213907,-0.0337899895463865,-0.03471648451252587,-0.03560241550377277,-0.03644924958811604,-0.03725844552214324,-0.03803146206244198,-0.03876975796559985,-0.03947479198820444,-0.040148022886843355,-0.04079086918143014,-0.0414045246612494,-0.04199011138912959,-0.04254875173739297,-0.043081568078361825,-0.04358968278435841,-0.04407421822770501,-0.04453629678072389,-0.04497697272481405,-0.04539702287965747,-0.04579718981085372,-0.046178217934428425,-0.046540851666407175,-0.04688583542281558,-0.04721391361967926,-0.047525830673023835,-0.04782227894717336,-0.048103734232847145,-0.048370642348801465,-0.04862345047067992,-0.04886260577412607,-0.04908855543478356,-0.04930174662829595,-0.04950262653030685,-0.04969158815604882,-0.04986887549187866,-0.0500347659941772,-0.05018953951571608
+0.,0.003970600412091242,0.008201649951294143,0.01165570429075142,0.01436552519140226,0.016402309183302006,0.01783889376527847,0.018742372032327523,0.019179837079445032,0.019214401691524465,0.018892572812052796,0.018258100073854013,0.017354806548301255,0.016226515306767678,0.01491629131852606,0.013454734296164299,0.011864755177173631,0.010169504852006257,0.008392134211114372,0.006555794144950185,0.0046836355439658925,0.0027957754463776774,0.0009009540748121546,-0.0009927794611653109,-0.0028772808882268266,-0.004744405933044478,-0.006586010322290356,-0.008393949782636554,-0.010160080040755162,-0.011878580565529706,-0.013550036036847676,-0.015173604560114486,-0.016748341567944126,-0.018273302492950567,-0.01974754276774782,-0.021170117824949854,-0.02254008309717066,-0.023857288330815084,-0.025123615322397635,-0.02634032487083736,-0.02750864064822322,-0.02862978632664417,-0.029704985578189198,-0.030735462074947248,-0.031722439489007305,-0.032667303261430004,-0.03357173589175255,-0.03443719446779788,-0.035265127009602605,-0.03605698153720336,-0.03681420607063677,-0.03753824862993945,-0.03823055723514805,-0.03889254615299558,-0.0395254187359852,-0.04013029919501654,-0.04070831171171587,-0.04126058046770953,-0.04178822964462378,-0.04229238342408496,-0.04277416598771934,-0.043234636137278715,-0.04367457906943116,-0.044094740998847,-0.044495869824187365,-0.0448787134441134,-0.045244019757286215,-0.045592536662366955,-0.045925012058016745,-0.04624214326671783,-0.04654441194170921,-0.046832266178936405,-0.04710615532523621,-0.04736652872744545,-0.04761383573240095,-0.04784852568693952,-0.04807104793789799,-0.04828179829835954,-0.048481023602803665,-0.048669002350235,-0.04884601538672601
+0.,0.004186809498108428,0.008650849044487452,0.012317057141925223,0.015219424250913268,0.017430634103331934,0.019024823415168625,0.02007031112681993,0.020635416178682423,0.020784381696932207,0.02056455773270267,0.02002056123140358,0.019197085506065508,0.018138823869719024,0.01688969896656168,0.015480994300832968,0.013936235701988285,0.012279198231639388,0.010533656951398023,0.008723386922875952,0.006872163207684927,0.005000682042130391,0.003118072501602329,0.0012327439210297136,-0.0006467980701263818,-0.0025120478424048861,-0.004354499766344633,-0.006165648212484609,-0.00793698755136369,-0.009662393560998926,-0.011342320425615183,-0.012975772422131306,-0.01456164879054971,-0.016098848770872816,-0.01758627160310305,-0.01902281652724283,-0.02040738278329458,-0.021739697555773978,-0.023021612963221007,-0.02425433963571473,-0.025439049592923278,-0.02657691485451477,-0.027669107440157353,-0.028716799369519142,-0.02972116266226828,-0.03068354654585876,-0.031605636192264634,-0.03248887877463101,-0.03333471166882245,-0.03414457225070355,-0.03491989789613887,-0.03566212598099299,-0.0363726938811305,-0.03705301149757451,-0.03770429118920835,-0.038327658986050255,-0.03892424056068626,-0.03949516158570242,-0.04004154773368474,-0.040564524677219285,-0.04106521808889209,-0.04154469090891945,-0.04200373626588977,-0.04244310371235389,-0.04286354432267043,-0.043265809171197995,-0.04365064933229518,-0.0440188158803206,-0.04437105988963283,-0.04470808331219986,-0.04503037337026327,-0.045338380237321145,-0.045632555233953585,-0.04591334968074067,-0.04618121489826251,-0.046436602207099185,-0.046679962927830805,-0.04691169549030898,-0.047132049447192965,-0.04734130420522237,-0.04753974146823613
+0.,0.004400358248025417,0.009087674336214544,0.012956948007877231,0.016043685705747553,0.01842204294240503,0.020167358719345406,0.021349111729763923,0.022036780666855823,0.022295677331839396,0.022173949458575953,0.02171703713814133,0.020970459653282626,0.01997973628674695,0.018789603803642165,0.017431996183129937,0.015931025308716634,0.014311061094279702,0.01259647345369657,0.010811632300844688,0.008980907549601489,0.007125547931250189,0.005255054851750305,0.0033781822601009933,0.0015037814533499806,-0.0003592962714549792,-0.0022021996172661403,-0.004016077287035756,-0.005792077983716075,-0.007523786797048957,-0.009211532396720283,-0.010854171337795888,-0.012450452885006697,-0.013999126303083627,-0.01549894085675761,-0.01694864581075957,-0.018346990429820423,-0.019693584166680395,-0.020990250364343436,-0.022238151150103365,-0.023438408617215467,-0.02459214485893503,-0.025700481968517368,-0.026764542039217756,-0.0277854471642915,-0.028764511503479376,-0.02970342253546395,-0.030603617583183248,-0.03146652346956017,-0.03229356701751765,-0.0330861750499786,-0.033845774389865925,-0.034573791860102565,-0.035271632882939095,-0.035940518269819885,-0.03658157554514351,-0.03719593155832254,-0.03778471315876955,-0.03834904719589707,-0.038890060519117655,-0.03940887997784386,-0.03990657227237467,-0.04038393802098052,-0.0408417298072332,-0.04128070157862882,-0.041701607282663465,-0.0421052008668332,-0.042492236278634145,-0.04286346746556236,-0.04321960068363518,-0.04356112843196082,-0.04388850276513086,-0.0442021767832634,-0.04450260358647656,-0.04479023627488845,-0.045065527948617165,-0.04532893170778083,-0.045580848419202875,-0.04582153022312005,-0.046051257302588625,-0.046270312087281385
+0.,0.0046110921355604325,0.009512078519964374,0.013575452793177126,0.016838497937569162,0.019376822075565608,0.021266872830801208,0.02257922360770864,0.02338444781072058,0.023748865254981256,0.023721376658496565,0.023348201714740058,0.022675642027817765,0.021749999201835703,0.020616781178653055,0.019308539524370616,0.01784994370791966,0.016265929557404232,0.01458143290092839,0.012821389566596179,0.011010735382511655,0.009171245204848924,0.007312776429776644,0.005444412203873707,0.0035753339998946497,0.0017147232905940397,-0.0001282384512735664,-0.001944369752953614,-0.0037244891416915336,-0.005461903857979059,-0.007156821974400575,-0.00880795835411647,-0.010413918425889176,-0.011973307618481123,-0.013484731360654756,-0.014946795081172505,-0.016358104208796805,-0.017718155229781228,-0.01902874359365293,-0.020290984569285603,-0.02150595202735985,-0.022674719838556276,-0.023798361873555513,-0.024877952003038155,-0.02591456409768482,-0.026909478377599234,-0.027864384260550525,-0.02878070928250103,-0.02965986980328137,-0.030503282182722184,-0.031312362780654086,-0.03208852795690771,-0.0328331940713137,-0.033547761953611845,-0.03423346031707723,-0.0348914178599483,-0.03552276229852093,-0.03612862134909102,-0.03671012272795443,-0.037268394151407065,-0.03780456333574479,-0.0383197003664968,-0.03881461291230276,-0.039290056285257745,-0.039746787007842205,-0.04018556160253655,-0.04060713659182119,-0.04101226849817658,-0.04140171384408311,-0.04177618286746905,-0.04213617305365201,-0.04248213814506594,-0.04281453283043459,-0.04313381179848171,-0.04344042973793106,-0.04373484133750638,-0.044017501285931445,-0.044288812708545685,-0.04454903020198092,-0.0447984345976824,-0.045037308922843904
+0.,0.00481884394362413,0.009924004815912393,0.014172640847054923,0.017604045432965707,0.02029525641168045,0.02232373964735951,0.02376109928106136,0.024678939453844473,0.025144528355865683,0.02520747573152706,0.02491473796633144,0.024313355970118085,0.023450370652726253,0.022372018807903647,0.021111437140345933,0.019693824612932318,0.018144654418036095,0.016489399748030553,0.014753533795288995,0.012962529752184709,0.011138662569883072,0.009292129481933056,0.0074323276154528405,0.005568753328323852,0.0037109029784275567,0.0018682729236454048,0.000050359521858846755,-0.0017333408690506542,-0.003475870311818171,-0.005177321133735109,-0.006836272457118788,-0.008451191933802777,-0.010020547215620644,-0.011542805954405972,-0.013016435801992333,-0.014439904410213297,-0.01581259999756011,-0.017136290981304213,-0.018412047393765136,-0.019640896563885322,-0.020823865820607218,-0.021961982492873292,-0.02305627390962597,-0.024107767399807724,-0.02511771035354826,-0.02608779375817485,-0.027019435424450976,-0.027914041337356813,-0.028773017481872575,-0.029597769842978433,-0.030389704405654593,-0.031150227154881256,-0.03188073421334822,-0.03258246164861428,-0.03325653901639723,-0.03390409459410028,-0.03452625665912661,-0.0351241534888794,-0.03569891336076185,-0.03625166455217713,-0.03678348016259299,-0.03729517447474649,-0.03778750523114727,-0.03826123123553748,-0.03871711129165926,-0.039155904203254765,-0.03957836877406615,-0.03998526380783555,-0.04037730320562773,-0.04075498914990266,-0.041118776880918195,-0.04146912248836629,-0.04180648206193883,-0.042131311691327735,-0.042444067466224935,-0.042745205476322345,-0.04303513092842027,-0.04331410074387574,-0.04358239627809433,-0.04384030103110385
+0.,0.005023435077755908,0.01032338836438571,0.014748576231896187,0.01834050992378652,0.02117763043626181,0.023338334769704205,0.024895194908556628,0.025920782837262003,0.02648325651925823,0.026632891520781365,0.026417338650605686,0.025884335750058295,0.025081620660466295,0.024056117330794945,0.022841515605758932,0.02146351623543279,0.01994810162162207,0.01832125416613232,0.016608956270769092,0.014837190337337946,0.013028705729283756,0.01119402355785483,0.009342839917673732,0.007484950967009553,0.005630152864131413,0.0037882417673084237,0.0019690138348096905,0.0001822652249043365,-0.001564793447453955,-0.0032721435485278194,-0.004938234329822278,-0.006561401642845801,-0.00813998133910685,-0.009672309270113907,-0.01115672128737544,-0.012591553242399916,-0.013976089716393435,-0.015312072934339154,-0.01660052929058302,-0.017842441229355188,-0.019038791194885822,-0.0201905616314051,-0.02129873498314317,-0.02236429369433022,-0.023388453415810292,-0.02437290633305819,-0.02531906059175217,-0.026228311888435503,-0.027102055919651483,-0.027941688381943372,-0.028748604971854445,-0.029524201385927998,-0.030269868924355316,-0.030986850464734532,-0.03167627610806313,-0.03233927439121738,-0.03297697385107356,-0.03359050302450792,-0.034180990448396725,-0.03474956465961623,-0.035297301403882695,-0.0358250211449359,-0.03633348376200186,-0.03682345005081001,-0.037295680807089815,-0.037750936826570735,-0.038189978904982215,-0.038613567838053705,-0.03902242087464732,-0.039417044607017025,-0.03979789558647774,-0.040165431119362265,-0.04052010851200334,-0.040862385070733745,-0.04119271810188626,-0.04151156491179364,-0.04181933261312723,-0.042116280319918925,-0.042402689790602625,-0.04267884487697312
+0.,0.005224676990264974,0.010710157584127272,0.015303318958278758,0.019048071507125786,0.02202422923533838,0.024311036441663558,0.025981971154440692,0.02711051140200917,0.02776564736958186,0.027998278005190103,0.027856706921901446,0.027389327167775788,0.026644531790873036,0.02566989083525095,0.024499615746508555,0.02315988174721877,0.02167715269572081,0.02007789245035388,0.0183885648694572,0.01663563381136998,0.014842297723130813,0.013019385832730665,0.011176878397715629,0.009324856502274482,0.007473401230596037,0.00563259366686909,0.0038125148952824439,0.0020232460000248944,0.0002722379489042484,-0.0014403843781528019,-0.0031129461487534264,-0.004743657306139346,-0.006330727793552274,-0.007872367554233945,-0.009366786531426075,-0.010812194668370382,-0.012207777469590444,-0.01355525178610893,-0.014855601948837041,-0.01610976714972916,-0.017318686580739675,-0.018483299433822993,-0.01960454490093349,-0.020683362174025572,-0.02172093623583632,-0.022718960096732216,-0.02367883229555909,-0.024601938324798768,-0.02548966367693309,-0.02634339384444387,-0.02716451431981294,-0.027954410595522148,-0.028714469032806438,-0.029445938778482547,-0.030149950170773154,-0.03082763170851374,-0.03148011189053977,-0.032108519215686714,-0.03271398218279004,-0.033297629290685214,-0.0338605385672847,-0.03440353622753472,-0.034927383998113695,-0.035432844381932275,-0.035920679881901105,-0.03639165300093085,-0.036846526241932145,-0.037286062107815655,-0.037710980883400955,-0.03812179328521581,-0.03851895699213573,-0.03890293034613315,-0.03927417168918049,-0.03963313936325017,-0.03998029171031463,-0.04031608707234628,-0.04064093429382245,-0.04095509454911167,-0.04125884988223292,-0.04155248437928984
+0.,0.005422372466461648,0.011084235529781807,0.015836926216778704,0.019726909747734066,0.022835339497692276,0.025242226464587507,0.027021894025994805,0.02824866555948922,0.028992306981861525,0.02930429895665492,0.029233556939732376,0.02882908811830256,0.028139899679574146,0.027214167346987502,0.026086593096198,0.024783799706649744,0.02333270514886893,0.021760227393381727,0.02009328441071432,0.018358794171392887,0.016580379238038057,0.014769161398602914,0.012935390481766451,0.011089417837758454,0.009241594816808738,0.007402272769147098,0.005581803045003338,0.0037905369946072643,0.0020361537927349213,0.00031887992192588567,-0.0013594914033026578,-0.0029970500233901898,-0.004591885778776186,-0.006142088509900143,-0.007645748057201538,-0.00910095426111985,-0.010506798038659861,-0.01186497166323109,-0.013176418952075181,-0.014442037451963138,-0.015662724709665964,-0.01683937827195468,-0.017972895685600284,-0.019064174497373795,-0.020114370073074494,-0.021125175872933315,-0.022097980885163793,-0.023034160480319,-0.023935090028952034,-0.024802144901615968,-0.025636700468863884,-0.026440132101248882,-0.027213821103499094,-0.027959022354398642,-0.028676866125468336,-0.02936848058408484,-0.030034993897624818,-0.03067753423346491,-0.03129722975898179,-0.031895208641552104,-0.03247255083091474,-0.033030087866865405,-0.033568583038723385,-0.03408880027625666,-0.03459150350923319,-0.03507745666742098,-0.035547423680588,-0.03600216847850221,-0.03644241407366914,-0.03686867502332498,-0.037281409953692314,-0.03768107806471711,-0.03806813855634529,-0.038443050628522795,-0.038806273481195565,-0.039158266314309535,-0.039499439531672495,-0.0398300562354725,-0.040150398641340936,-0.040460750955819226
+0.,0.005616316963543876,0.011445541141000555,0.016349453513252763,0.02037720471222289,0.023611250464077208,0.026132291072710968,0.028015435684458346,0.02933579344565352,0.03016385058524891,0.030551628598080836,0.03054861448584564,0.030204389171523835,0.029568533578095933,0.028689789344344418,0.027603318382072226,0.02633616451787807,0.024915672905091404,0.023369188697041684,0.021724057047058362,0.0200076231084709,0.018243908958662132,0.016444313598405754,0.014619342052768013,0.0127796014969629,0.010935699106204427,0.009098242055706598,0.007277837520683426,0.005485092676348915,0.003727902941164873,0.0020065918297922265,0.000323065324213894,-0.0013206520300816597,-0.002922535687605963,-0.00448056110287056,-0.005992703730386982,-0.007456939024666763,-0.008872267731401064,-0.010240358320435312,-0.011562115619627724,-0.012838397111586262,-0.014070060278918887,-0.015257962604233581,-0.01640296157013829,-0.017505914659241003,-0.018567948651085107,-0.0195907570790499,-0.020575719434683427,-0.021524201046306563,-0.022437567242240195,-0.0233171833508052,-0.024164414700322448,-0.02498062661911284,-0.02576719523667852,-0.026525380630233783,-0.027256312704783157,-0.02796111900690106,-0.028640927083161898,-0.029296864480140072,-0.029930058744410005,-0.03054163742254609,-0.031132682029470917,-0.031704029007047545,-0.032256442926895064,-0.032790688869811166,-0.03330753191659354,-0.03380773714803986,-0.034292069644947826,-0.03476129448811511,-0.035216137113094396,-0.03565711563633644,-0.036084689454492586,-0.03649931845116009,-0.03690146250993614,-0.03729158151441795,-0.037670135348202724,-0.03803758389488768,-0.03839433894678431,-0.03874066540129875,-0.03907684553514882,-0.03940316356500208
+0.,0.005806299866149755,0.011793990605500801,0.016840955899838594,0.020999138058115884,0.02435225490574284,0.026981621813556934,0.028963075238682807,0.0303724516382451,0.0312809032122562,0.03174095219061974,0.031802617495193385,0.03151601405234621,0.03093125678844727,0.030097614152154607,0.02905067788197764,0.027817886754290307,0.02642698659698397,0.024905723237949987,0.02328184250507973,0.021583090226264548,0.01983386376628743,0.01804582420647158,0.016229717614669334,0.014396392772923277,0.012556698463276018,0.010721483467770153,0.008901596568448286,0.00710788654735302,0.005348453289472292,0.0036237128459299097,0.0019356784405292755,0.00028648337832302333,-0.0013217390356362036,-0.0028868554962957816,-0.004406732698603069,-0.00587923733750543,-0.007303284329186299,-0.008680519091415023,-0.010011808960794245,-0.011297972910014931,-0.01253982991176804,-0.013738198938744554,-0.014893898963635417,-0.016007748959131617,-0.01708084812827842,-0.018114889699325863,-0.01911124371871285,-0.020071265549597376,-0.020996310555137468,-0.021887734098491124,-0.02274689154281636,-0.023575138251271204,-0.02437384505818675,-0.025144276709561476,-0.025887562448162905,-0.026604828914467735,-0.027297202748952646,-0.027965810592094312,-0.028611779084369425,-0.029236234866254665,-0.029840260664994046,-0.030424697405468933,-0.030990310665739235,-0.03153786640630748,-0.03206813058767619,-0.0325818691703479,-0.03307984811482514,-0.03356283338161042,-0.034031552528369125,-0.034486526951482895,-0.03492821664441288,-0.035357082003429055,-0.03577358342480135,-0.036178181304799745,-0.03657133603969417,-0.036953508025754575,-0.037325110274686255,-0.03768640934654949,-0.038037687472035245,-0.0383792287711464
+0.,0.005992105802198225,0.012129498596351733,0.017311489093864235,0.021592894044928458,0.025058650043788606,0.027790616385781907,0.029865299510653597,0.03135920585814892,0.03234410034275023,0.032872966625781236,0.03299631660110531,0.03276476014336799,0.03222890712721478,0.03143851437068222,0.030429573821146756,0.029229893525899428,0.02786759390624686,0.026370795383495675,0.024767618378952506,0.023086183313923973,0.021351238992774788,0.019574693665137373,0.017767520513176222,0.015940795934468236,0.014105596326590312,0.012272998087119354,0.010454077613632262,0.008659911303705945,0.006898791921240745,0.005171223634703997,0.0034793214749230233,0.001825321972517357,0.00021146165810653848,-0.0013600229376899108,-0.002886895284252452,-0.004366918850961551,-0.005798926990516751,-0.0071845427973069665,-0.008524597587997472,-0.00981987335215971,-0.01107115207936512,-0.012279215759185162,-0.013444846381191262,-0.014568825934954885,-0.015652227035446498,-0.016696742226090133,-0.01770373215719345,-0.018674542301011836,-0.019610518129800723,-0.020513005115815507,-0.021383348731311595,-0.022222894448544417,-0.023033007685455093,-0.02381495733125336,-0.024569871674824578,-0.02529887616927433,-0.026003096267708196,-0.02668365742323174,-0.027341685088950546,-0.027978304717970186,-0.028594599900829604,-0.029191415630272673,-0.02976951821943251,-0.030329674241698627,-0.030872650270460534,-0.03139921287910774,-0.03191012864102976,-0.032406164129616104,-0.03288804872748435,-0.03335630683401096,-0.03381139886914897,-0.03425378557420364,-0.034683927690480245,-0.035102285959284065,-0.03550932112192035,-0.03590549391969438,-0.03629121841637786,-0.036666762702276715,-0.037032408858314936,-0.03738844080451635
+0.,0.006173515809439378,0.012451979518110053,0.01776111060418998,0.0221586605322803,0.025730738447937072,0.028559679452224748,0.030722603774115714,0.03229663164258235,0.03335408851842662,0.03394838098674058,0.03413047564858713,0.03395143895494037,0.03346233735677436,0.03271337827192815,0.031740924736731194,0.0305731288149127,0.02923845987288586,0.027765387277063874,0.026182380393859953,0.024517908589687297,0.02279704864609648,0.021031941293822784,0.019233773129857152,0.01741383440671669,0.015583415376918508,0.013753806292979714,0.01193629740741742,0.010142178972748736,0.008379925237040248,0.006650124145180019,0.004954987192844351,0.0032968486963816167,0.0016780429721401971,0.00010090433646845311,-0.001432232894285236,-0.0029190344037724964,-0.004358256170129523,-0.005751499670075376,-0.00709956164421946,-0.008403188597712528,-0.009663127035705335,-0.010880123463348651,-0.012054924385793218,-0.01318827630818981,-0.014281226224144745,-0.015335465611312653,-0.016352345770106785,-0.01733320235315392,-0.018279371013080865,-0.01919218740251439,-0.020072987174081295,-0.02092310598040838,-0.021743903700519776,-0.02253665284553884,-0.02330248046287976,-0.024042510540997986,-0.02475786706834899,-0.0254496740333882,-0.026119055424571096,-0.026767135230353116,-0.027394997559435974,-0.0280034910613315,-0.02859338251737605,-0.029165438851536024,-0.029720426987777826,-0.030259113850067857,-0.030782266362372523,-0.03129065144865822,-0.031785000023179646,-0.032265839213863674,-0.03273362970013522,-0.033188832404030776,-0.03363190824758683,-0.03406331815283987,-0.03448352304182637,-0.03489298383658283,-0.03529211548747228,-0.03568118748292235,-0.03606048165367446,-0.03643028161988138
+0.,0.006350308664162335,0.01276134869962129,0.01818988080507047,0.022696629950966826,0.026368828915373885,0.0292892234348853,0.03153549247383056,0.03318531499653915,0.03431152593354322,0.03496791708397303,0.035205872179974077,0.035076876565493714,0.034632415584479284,0.03392311016140999,0.03298566580555508,0.03184855377258714,0.03054056716404762,0.02909049908147798,0.027527142626419674,0.025879290900414152,0.024172325590898956,0.022418605452447504,0.020629517030101303,0.018816550905120952,0.016991197658767058,0.015164947872300237,0.013349292126981098,0.011555721004070253,0.009792879037124549,0.008061433686326707,0.0063636876643497885,0.004702067729312034,0.003079000639331686,0.001496913152526968,-0.00004176797298387528,-0.0015346159790826058,-0.002980313579902596,-0.004380441317058315,-0.0057357627709509465,-0.007046990432290799,-0.008314836791788181,-0.009540014340153424,-0.01072323556809682,-0.011865212966328705,-0.012966968851737483,-0.01403019325420225,-0.01505622816758441,-0.016046399493002114,-0.01700203313157354,-0.01792445498441684,-0.018814990952650178,-0.019674966937391732,-0.020505737154887255,-0.02130857722135471,-0.022084612659185165,-0.022834965718871916,-0.02356075865090826,-0.024263113705787473,-0.02494315313400285,-0.025601999186047678,-0.026240736147609307,-0.026860215918104547,-0.027461205484704092,-0.02804447186414674,-0.028610782073171272,-0.029160903128516477,-0.02969560204692115,-0.03021564584512407,-0.03072176667961705,-0.03121449413506813,-0.031694288986646585,-0.03216161217613905,-0.03261692464533212,-0.033060687336012445,-0.03349336118996664,-0.033915407148981315,-0.03432724088645453,-0.0347291331572214,-0.035121365444697245,-0.035504220972636455
+0.,0.006522262141595169,0.013057523570974627,0.018597863994694333,0.023207000251797432,0.02697323732462714,0.029979669283843585,0.03230447991854103,0.034025853017813766,0.03521708299878274,0.035932309963583195,0.03622329789292358,0.03614191403397443,0.03574002563390628,0.03506863071215185,0.03416474914430496,0.033057146988793785,0.031774916315885936,0.030347149195849007,0.02880293769895062,0.027171373895458362,0.025478121703952177,0.02373574368029848,0.02195581308710147,0.02015000754611852,0.018330004679107025,0.016507482107824375,0.014694117454027962,0.012901588339475181,0.011138698583687143,0.0094061909823514,0.007706454313237207,0.006042002529799529,0.004415349585493337,0.002829009433773581,0.0012854960280952311,-0.00021267667808674753,-0.0016641221659123842,-0.0030704007011851985,-0.004432244091343617,-0.005750332253328225,-0.007025345104079608,-0.008257962560538375,-0.009448864539645097,-0.010598730958340385,-0.011708560379052345,-0.012780041001014297,-0.013814505551838994,-0.014813270245942374,-0.01577765129774041,-0.01670896492164904,-0.01760852733208423,-0.018477654743461944,-0.019317695584036173,-0.020129928062987294,-0.02091547589814923,-0.02167545933268044,-0.02241099860973937,-0.023123213972484468,-0.023813225664074185,-0.024482153927666957,-0.025131082888775785,-0.025760867294263204,-0.02637227407926722,-0.026966070099995174,-0.027543022212654384,-0.028103897273452184,-0.028649462138595913,-0.029180483664292885,-0.029697694963855256,-0.030201627809661635,-0.030692742912182127,-0.031171501075282118,-0.031638363102827,-0.03209378979868217,-0.032538241966713,-0.0329721804107849,-0.033396021365758824,-0.033810036721697176,-0.034214507520760426,-0.03460971649707892
+0.,0.006689154222568002,0.01334042480024338,0.018985129416858232,0.023689975820743704,0.027544287459405525,0.0306314472192021,0.03303009094968981,0.03481885450042505,0.036071442886194176,0.036842308399266215,0.03718355908444299,0.03714740780055184,0.0367860674064201,0.03615087729083976,0.03527914410354973,0.034199904756907686,0.032942525972090644,0.03153637447027562,0.030010816972639652,0.02839522020035975,0.02671550803021727,0.024984432836201415,0.023213741607723604,0.02141528596015879,0.01960091750888194,0.017782487869268025,0.01597184865669202,0.014180851486528898,0.012418448666854043,0.010685454231890672,0.008984337970116166,0.007317695882992656,0.005688123971982285,0.004098218238547175,0.0025505746841494657,0.0010477893102512938,-0.0004086860816227651,-0.0018203921173916109,-0.0031880301896331643,-0.004512249052316067,-0.00579369745940896,-0.007033024164880508,-0.00823087792269934,-0.009387907486834123,-0.010505088564824298,-0.011584107141736997,-0.012626286716062559,-0.013632933876855347,-0.014605355213169757,-0.015544857314060153,-0.016452746768580925,-0.01733033016578646,-0.018178950019365044,-0.01899988662422889,-0.019794261618119022,-0.020563192971397557,-0.021307798654426627,-0.022029196637568332,-0.022728504891184807,-0.023406841385638158,-0.02406528975318842,-0.02470470719026419,-0.025325860326603465,-0.025929515609070836,-0.026516439484530873,-0.02708739839984816,-0.027643158801887287,-0.02818448713751282,-0.02871211719549137,-0.02922658266980576,-0.029728344049055746,-0.030217861844807167,-0.030695596568625857,-0.031162008732077664,-0.031617558846728414,-0.03206270742414395,-0.03249787110112451,-0.03292332277243892,-0.033339342948217145,-0.03374621378297416
+0.,0.00685076429600335,0.01360997739968531,0.01935175225325993,0.024145768368355934,0.02808231180782913,0.031244997448916435,0.033712861586083796,0.035564940513797175,0.03687530204963613,0.03769867535951675,0.03808747706991394,0.0380942300620751,0.03777145721724767,0.03717080425826099,0.03632983753588964,0.03527784131274958,0.03404443309646144,0.03265923039464587,0.031151850714923497,0.029551911564914966,0.027885574913502315,0.026165769213534473,0.024404402433512423,0.02261348738019431,0.020805036860338324,0.018991063680702632,0.01718358064804541,0.015394600569124842,0.013633213648123806,0.011900301145078982,0.010198408903791156,0.00853020992701049,0.006898377217487157,0.005305583777971311,0.00375450261121312,0.0022478067199627506,0.0007870093198291672,-0.0006294111877143734,-0.0020021271088312012,-0.0033317574149076285,-0.004618921077329969,-0.005864237067484558,-0.007068324356757695,-0.008231801916535721,-0.009355623476083545,-0.010441472422357684,-0.011490663058545533,-0.01250449240607029,-0.013484257486355178,-0.014431255320823393,-0.015346782930898153,-0.01623213733800268,-0.017088655013324457,-0.017917617835397154,-0.018720145090311502,-0.019497352214054443,-0.02025035464261293,-0.020980267811973884,-0.021688207158124265,-0.02237528811705099,-0.023042593498829825,-0.023690982556333604,-0.02432122136502766,-0.024934075718103983,-0.025530311408754544,-0.02611069423017133,-0.02667598997554633,-0.027226964438071505,-0.027764351804719423,-0.028288687428041602,-0.028800431420854127,-0.02930004385131834,-0.029787984787595582,-0.030264714297847193,-0.030730692450234517,-0.031186379312918896,-0.031632191767226994,-0.03206840358289699,-0.0324952946503405,-0.032913146457629446
+0.,0.0070068743193661236,0.013866111855427014,0.019697814634279196,0.024574597818505628,0.028587652347973225,0.031820770864296855,0.034353339639870105,0.03626474494708658,0.03762937070334054,0.0385021884262442,0.03893588854731213,0.038983269087168636,0.03869712806643806,0.03812938320078529,0.03731783399193089,0.03629198899867281,0.03508169310846473,0.03371679120876035,0.03222712818701338,0.030642548930677495,0.028989432046248074,0.027280868574392263,0.025528914961153504,0.023745732650189002,0.021943483085155997,0.020134327709711717,0.018330427967513392,0.01654394530221826,0.014784097429328994,0.013051828907566142,0.011349756781082115,0.009680626109231172,0.008047181951367586,0.00645216936684561,0.004898333415019512,0.0033884191552435586,0.001923997431372601,0.000503565083241786,-0.0008735224072034323,-0.002207855578242752,-0.0035000249681558728,-0.004750621115222517,-0.005960234557722368,-0.007129455833935154,-0.008259217548264999,-0.009351200105430979,-0.010406708643707116,-0.011427030670867774,-0.012413453694687352,-0.013367265222940216,-0.014289752763400757,-0.015182203823843362,-0.016045948703665866,-0.016882270368257513,-0.017692285484459436,-0.018477106696158124,-0.019237846647240063,-0.01997561798159171,-0.020691533343099554,-0.021386705375650062,-0.022062215742414466,-0.022718925364519815,-0.023357599518774096,-0.02397900310483563,-0.024583901022362704,-0.025173058171013635,-0.025747239450446734,-0.026307209760320287,-0.026853703412745317,-0.02738725715901891,-0.027908330585500574,-0.02841738316909309,-0.028914874386699242,-0.02940126371522181,-0.02987701063156357,-0.03034257461262731,-0.03079837262902737,-0.03124467919664458,-0.0316817735014999,-0.03210993628147947
+0.,0.007157270018282383,0.014108765169526255,0.02002340656978474,0.02497669314415511,0.029060661295574386,0.03235922970752332,0.03495208531131844,0.0369189150382763,0.03833437329171596,0.03925364021079553,0.039729645964510604,0.03981542953943895,0.03956402992215834,0.039027603177511014,0.038244155934774836,0.03724339844864422,0.03605538004189646,0.03471015003730886,0.0332377577576587,0.03166825252572327,0.030028208547288014,0.02833086621220741,0.0265884181916746,0.024813162262466704,0.023017396201360875,0.021213417785134258,0.01941352479056399,0.017630014994427234,0.015872223448984105,0.014141154171381664,0.01243949065290036,0.010770045168199497,0.009135629991938395,0.007539057398776354,0.00598313966337269,0.004470689060386715,0.0030033303597530375,0.0015795782741045098,0.00019881480578412894,-0.0011395234684670836,-0.002435999971908107,-0.0036911781277979444,-0.004905621359395557,-0.006079893089959955,-0.007214905629029263,-0.008312336015024202,-0.009373480248149973,-0.010399616372636785,-0.011392022432714881,-0.012351976472614472,-0.013280756536565791,-0.014179640668799075,-0.015049952866293405,-0.015892976690095845,-0.016709825924136258,-0.017501610166299798,-0.018269439014471596,-0.019014422066536772,-0.01973766892038047,-0.02044028917388781,-0.021123363022880863,-0.021787752673656628,-0.022434222364459508,-0.023063535866006235,-0.023676456949013505,-0.024273749384198046,-0.024856176942276573,-0.025424503393965785,-0.025979462907690536,-0.026521593377024092,-0.02705135371441714,-0.027569202660885785,-0.028075598957446126,-0.02857100134511428,-0.02905586856490634,-0.029530659357838412,-0.029995790630806707,-0.030451537518040136,-0.030898178419439974,-0.031335993241970825 +0.,0.007301741873679717,0.014337881921914791,0.020328626898334735,0.025352293194819468,0.029501701835819974,0.032860848219254765,0.03550967176222607,0.037528111881836114,0.03899104893799845,0.039953838749087506,0.04046961786608661,0.04059163278087026,0.04037312998531293,0.039866470950396,0.03910984393913177,0.03813313875946065,0.03696658669109514,0.035640419013747746,0.03418486700713094,0.032630161950957255,0.031003053032455125,0.029316917008426143,0.027584070774811783,0.02581693639124445,0.02402793591735657,0.022229491412780553,0.02043402493714882,0.01865395855009379,0.016898734678738793,0.015169413046493017,0.013468738941556792,0.011799587117260995,0.01016483232693652,0.008567349323914245,0.007010012861525063,0.005495697693099858,0.00402607889702393,0.0025996885165951496,0.0012159338378678116,-0.00012572273364041381,-0.0014258187924118583,-0.0026848919329288757,-0.003903479749673781,-0.005082119837128933,-0.0062217050163497056,-0.007323908575729959,-0.008390017400596016,-0.00942130011773656,-0.010419025353940318,-0.011384461735995976,-0.01231887789069225,-0.013223542444817854,-0.014099772960986176,-0.014948853110505495,-0.01577189353464899,-0.016570000535193627,-0.017344280413916363,-0.01809583947259414,-0.018825784013003923,-0.01953522033692265,-0.020225226856717987,-0.020896666686051918,-0.021550302789166316,-0.022186897576864324,-0.022807213459949054,-0.023412012849223653,-0.02400205815549124,-0.024578111789554945,-0.02514090751163148,-0.02569098410459243,-0.026228799662724768,-0.026754812049727357,-0.027269479129299046,-0.027773258765138684,-0.028266608820945123,-0.028749987160417212,-0.029223810476024747,-0.029688354393713604,-0.030143896448393177,-0.0305907156382848 
+0.,0.00744008641544861,0.014553415235473507,0.020613584143543483,0.025701647475643487,0.029911148817467956,0.03332611325385597,0.03602668565811061,0.03809301090353497,0.03960015185730632,0.04060360785850826,0.0411566891991981,0.04131281713131673,0.04112541290680499,0.04064701116301746,0.03991595683540252,0.038962297603783023,0.037816424696263326,0.03650872934094779,0.03506960276594077,0.033529436199346625,0.0319151336165403,0.030240195419292838,0.0285170509825514,0.026758234854690314,0.02497628158408392,0.023183725719106544,0.02139310180813251,0.019616944399536162,0.01786479354836785,0.016137761021364178,0.014438649357634836,0.01277039115840939,0.011135919024917408,0.009538165558388446,0.007980063360052074,0.006464545031137857,0.00499333242618989,0.0035649744004210464,0.0021789023183321904,0.0008346031602324447,-0.00046843609356906756,-0.0017307284627632463,-0.002952786967040952,-0.004135124626093088,-0.005278615555093826,-0.006384928909275975,-0.00745534247852204,-0.008491115514165416,-0.009493507267539534,-0.01046377698997779,-0.011403183932813605,-0.012312987347380401,-0.013194498228686128,-0.01404899987892744,-0.01487759954087064,-0.015681399973842117,-0.016461503937168255,-0.017219014190175425,-0.01795503349219002,-0.018670664602538405,-0.019366983838431346,-0.020044854848539795,-0.020705039092117034,-0.02134829739349612,-0.021975390577010083,-0.02258707946699198,-0.02318412488777485,-0.02376728766369173,-0.024337300886588933,-0.02489470397928658,-0.02543995407682597,-0.02597350802732603,-0.02649582267890567,-0.027007354879683823,-0.027508561477779404,-0.02799989932131133,-0.028481784739965193,-0.028954493726051383,-0.02941830287339653,-0.02987349019648959 +0.,0.0075721072660898696,0.014755327764556347,0.020878397391560527,0.02602501691896734,0.030289389432864904,0.03375552487605173,0.036503727689418466,0.03861430231385573,0.04016245174914117,0.041203787475098105,0.04179176160069803,0.04197993811049042,0.041821880989024715,0.04137026650584393,0.04066357184248063,0.0397319813339025,0.03860602462137544,0.03731623134616531,0.035893131149537995,0.03436725367275937,0.03276563791442608,0.031101895463235142,0.02938855668455867,0.02763825708002383,0.025863632151257816,0.02407731739988781,0.02229194832754099,0.02052016043584455,0.01877158187794517,0.017047372891265348,0.015350388825116926,0.01368361560483346,0.012050039155748518,0.010452645403195647,0.00889442027250841,0.007378349689020368,0.005906198837211863,0.004476532888763641,0.0030888061131490096,0.0017425288441315903,0.00043721141547499887,-0.0008276358390571681,-0.0020525025857012743,-0.00323787849069373,-0.004384619722001257,-0.005494390919426332,-0.006568460792994097,-0.007608079256344706,-0.008614496223118346,-0.00958896160695517,-0.010532725321495358,-0.011447037280379088,-0.012333201776507367,-0.013192501269840333,-0.014026039352660662,-0.014834914999235886,-0.015620227183833546,-0.016383074880721153,-0.01712455706416624,-0.017845772708436326,-0.0185477957283474,-0.019231489940839144,-0.019897615073626754,-0.02054693014240986,-0.021180194162888067,-0.021798166150760998,-0.022401605121728275,-0.022991270091489508,-0.023567893227772068,-0.024132014347755047,-0.02468408948929786,-0.02522457434981252,-0.02575392462671103,-0.02627259601740542,-0.026781044219307687,-0.02727972492982985,-0.027769053969919664,-0.028249307574295227,-0.028720761322308243,-0.029183692172470244 
+0.,0.007697616232752562,0.014943592607446947,0.021123197103147008,0.026322674607594753,0.030636823856997505,0.034149596927197816,0.03694141307348577,0.03909269155115147,0.04067873418569681,0.041755233992169676,0.04237575369083665,0.04259396869106978,0.04246355440224111,0.04203729789936797,0.041353784720927225,0.040443315108045816,0.039336536056449704,0.03806409456186484,0.036656637620017175,0.03514481222663266,0.0335557730705843,0.03190323073694417,0.030199805352451488,0.028458222097450706,0.026691206152286328,0.024911482697302825,0.02313177691284468,0.021364813979256395,0.019620300837079902,0.017899442713076925,0.01620514343239264,0.01454043782869435,0.01290836073564937,0.011311946986924995,0.00975423141618854,0.008238248857107318,0.006765804464621244,0.005335479254639484,0.003946749260265534,0.0025991469803895717,0.0012922049139017676,0.00002545555969227231,-0.0012015685833487216,-0.0023893350163310696,-0.003538682694634942,-0.0046512713615003715,-0.005728360658746667,-0.0067711911957586335,-0.007781003581921114,-0.008759038426618918,-0.009706536339236876,-0.010624737929159828,-0.011514940651453623,-0.012378425657184871,-0.013216292640027829,-0.014029636550287092,-0.014819552338267256,-0.015587134954272901,-0.016333479348608623,-0.017059680471579002,-0.01776680953289325,-0.01845573015679867,-0.01912720011734056,-0.019781976403790105,-0.020420816005418476,-0.021044475911496857,-0.021653713111296425,-0.022249284594088353,-0.022831921352198466,-0.023402163355466632,-0.023960465409750917,-0.024507281929731038,-0.02504306733008668,-0.02556827602549756,-0.026083362430643374,-0.026588780960203822,-0.027084946782886563,-0.02757213625336518,-0.02805062386574426,-0.02852068545297249 +0.,0.007816434333345114,0.015118194241908247,0.021348125937815318,0.026594906498292457,0.030953865880678142,0.03450885757337579,0.037340372023995594,0.03952889968014775,0.04114980094711525,0.04225882053805939,0.04290960129891227,0.043155899477369294,0.043051471321125856,0.042649184593010495,0.04198770983829654,0.04109744292584222,0.04000912762660254,0.03875350771153245,0.03736132695158697,0.035863329117721054,0.03428676569145022,0.03264543433449495,0.030952033967605715,0.02921936843841018,0.02746024159453605,0.025687457283611013,0.02391381935326276,0.022152131651118987,0.020412170815409306,0.01869518367309946,0.017004118298089496,0.015342054125597087,0.01371207059083991,0.012117247129035624,0.010560663175401903,0.009045398165156425,0.007573293951572231,0.0061429469456296985,0.004753853835164502,0.003405568129205978,0.0020976433367834502,0.0008296329669262275,-0.00039890947133634194,-0.001588430468974957,-0.002739752479249239,-0.0038545299691182685,-0.004934013519832411,-0.005979434465541741,-0.00699202414039637,-0.007973013878546377,-0.008923635014141862,-0.009845118881332928,-0.010738755960434922,-0.011605825633649654,-0.012447423451747645,-0.013264640105839946,-0.014058566287037604,-0.014830292686451642,-0.015580909995193116,-0.01631150890437305,-0.017023157620466522,-0.0177167192200051,-0.0183929493056293,-0.019052602626728605,-0.019696433932692475,-0.020325197972910384,-0.020939649496771802,-0.021540543253666198,-0.022128608813749333,-0.02270438606183947,-0.023268328440059403,-0.02382088895139646,-0.024362520598837943,-0.024893676385371183,-0.025414809313983495,-0.0259263723876622,-0.02642877998187125,-0.02692230845040122,-0.027407231133872452,-0.02788382267265419 
+0.,0.007928392821307806,0.015279129412375999,0.021553339535321046,0.0268420121089089,0.03124094350996949,0.034833849824677264,0.037701250197186294,0.03992366377165061,0.04157647034073077,0.04271543724044279,0.043394257677855215,0.04366673887505532,0.04358668805413039,0.043207024258351975,0.04256648023008225,0.04169552766056599,0.04062498699889535,0.03938567869416245,0.03800842319545945,0.03652404095187845,0.03495986177832659,0.033329758767407236,0.03164649893023388,0.029922954035359016,0.028171995851335167,0.026406496146714847,0.02463932669005058,0.022883359249894885,0.02114843129516194,0.019435827956934108,0.017748537438949694,0.016089679581061375,0.014462374223121832,0.012869741204983732,0.011314900366499757,0.009800971547522581,0.008329830115419483,0.006900087450018113,0.005511259817716142,0.004162920616337334,0.00285464324370545,0.0015860010976442287,0.00035656757597745097,-0.00083408392347115,-0.0019867600380709203,-0.0031031095804719177,-0.004184374074905031,-0.005231775604803108,-0.006246536253599029,-0.0072298781047256434,-0.008183023241615826,-0.009107193747702452,-0.010003672990490688,-0.010873738130244712,-0.0117184803343426,-0.012538985803113759,-0.013336340736887591,-0.01411163133599348,-0.014865943800760837,-0.015600364331519043,-0.01631595783802244,-0.017013586500153735,-0.017694003535788115,-0.018357961245293472,-0.01900621192903768,-0.01963950788738862,-0.020258601420714187,-0.02086424482938225,-0.02145716601916852,-0.02203790455632175,-0.022606912390547747,-0.023164640987208624,-0.02371154181166646,-0.024248066329283356,-0.024774666005421407,-0.02529179230544271,-0.02579985867310761,-0.02629914134220483,-0.026789912434081114,-0.02727244533198531 +0.,0.008033334151042374,0.015426407985440963,0.02173900726831632,0.02706430517916233,0.03149849954327777,0.03512513203494654,0.038024709120797855,0.040277737267460895,0.041959577508414156,0.04312599148147567,0.04383069371217836,0.04412751325658754,0.044070279170768514,0.04371193308226866,0.043091247662356905,0.042238751094653866,0.04118532089384653,0.03996183457462184,0.03859916965166678,0.03712820363966832,0.035576326668879305,0.033957475893931235,0.03228447597820294,0.03057025613170094,0.02882774556443175,0.027069873486401906,0.02530956910761793,0.02355976163808636,0.02183034073467646,0.02012262663031051,0.018439643648606268,0.016784547947809016,0.015160495686164035,0.013570643021916589,0.01201814611331196,0.010506161118595427,0.009036593823351919,0.007608070172795302,0.006220124968699487,0.004872350410255251,0.0035643386966533646,0.00229568202708458,0.0010659726007396886,-0.00012519738319056507,-0.0012786194081198096,-0.0023959362563305683,-0.0034783803944296926,-0.0045271646750678525,-0.0055435019508957555,-0.006528605074564074,-0.007483686898723506,-0.00840996027602475,-0.009308701322005505,-0.010181184529027668,-0.011028496444371146,-0.011851718549606552,-0.012651932326304504,-0.013430219256035587,-0.014187660820370427,-0.014925338500879615,-0.01564431362180526,-0.01634544712368702,-0.017029489630623677,-0.017697190789104617,-0.018349300245619225,-0.018986567646656877,-0.019609742638706972,-0.020219574868258886,-0.020816790339097974,-0.021401928069604212,-0.021975438391401786,-0.022537771109281995,-0.023089376028036124,-0.023630702952455472,-0.024162201687331325,-0.02468432203745498,-0.02519747637900931,-0.025701940708484698,-0.026197985864523418,-0.02668588391109368 
+0.,0.008131112913583585,0.015560053779207567,0.021905312969585755,0.02726211430673517,0.03172699212432034,0.035383278377337395,0.03831142659846698,0.04059189032038975,0.042299974708847214,0.04349140812770584,0.04421989810055973,0.04453926710119169,0.044503337603384506,0.04416504583457757,0.04356318266900239,0.042728313929948664,0.041691355071855216,0.0404832215491608,0.03913482881630423,0.03767709232772428,0.036137444954745385,0.03452987682483353,0.03286726008276246,0.031162571169055225,0.029428786524234873,0.027678882588824456,0.025925835803347036,0.02418262260832566,0.022459176431552222,0.020756849500010336,0.01907869835691569,0.01742791150406445,0.01580767744325278,0.014221184676276826,0.012671621704932747,0.011162177031016707,0.009694783851228527,0.008268082295278265,0.006881624690344571,0.0055350209837223465,0.004227881122706494,0.0029598150545918933,0.0017304327266734654,0.0005393440862460835,-0.0006142278337473382,-0.0017319194114132253,-0.002814954050915216,-0.003864535389401395,-0.004881867064019885,-0.005868152711918773,-0.006824595970246171,-0.007752400476150194,-0.008652834952996204,-0.009527170786593622,-0.010376489671176959,-0.011201868145159572,-0.012004382746954815,-0.012785110014976014,-0.013545126487636535,-0.014285508703349705,-0.015007314116801162,-0.015711402092876383,-0.016398520457209367,-0.0170694160018047,-0.017724835518666947,-0.01836552579980069,-0.018992233637210518,-0.01960570582290099,-0.02020666622569421,-0.020795653091159633,-0.021373115010155125,-0.021939500006901116,-0.022495256105618006,-0.023040831330526226,-0.023576673705846176,-0.024103231255798278,-0.024620915155740237,-0.025130001049489512,-0.02563075843182001,-0.02612345798754238 +0.,0.008221596870450723,0.015680105328401302,0.022052455602354918,0.027435783535828075,0.03192689524535217,0.03560887927523142,0.03856209707679243,0.040866910101361856,0.04259853156677167,0.043812629726750464,0.04456287750493396,0.044903063100932825,0.0448869747143577,0.044567515900153336,0.043983474552550826,0.0431654357608694,0.04214433428201603,0.04095110487289767,0.039616682290421305,0.03817200129149389,0.03664452036032282,0.03504827179027463,0.03339616530537635,0.03170121463573242,0.029976433511447305,0.028234835662625444,0.0264894348193713,0.024753244711789326,0.023036234348526564,0.021339784937952966,0.019666981452915122,0.01802104087596372,0.016405180189649433,0.014822616376522928,0.013276566419134876,0.011770247300035959,0.010305616709061688,0.008881328601896773,0.007496951854566523,0.006152113143587214,0.004846439145475116,0.0035795565367464795,0.002351091993917595,0.0011606721935047064,7.534071566657127e-6,-0.0011099519745364356,-0.002193000277438747,-0.003242805269345864,-0.004260561382463413,-0.005247463048996983,-0.006204704701152189,-0.007133480771134649,-0.008035052448934077,-0.00891068758266202,-0.009761462784295205,-0.010588449428239052,-0.011392718888898971,-0.012175342540680353,-0.012937391757988623,-0.013679937915229168,-0.01440403431828145,-0.015110538426579884,-0.015800195066899572,-0.01647374798037102,-0.017131940908124713,-0.01777551759129115,-0.01840522177100083,-0.01902179718838424,-0.019625965348998103,-0.02021826350511881,-0.02079913838709487,-0.021369036121478464,-0.02192840283482176,-0.022477684653676954,-0.02301732770459621,-0.02354777811413171,-0.024069445726227837,-0.02458260571866613,-0.02508752618336818,-0.02558447636830112 
+0.,0.008304667823867328,0.015786616660698322,0.02218064993601424,0.027585672942277123,0.0320986992497125,0.03580254182706333,0.03877743199825872,0.04110360108722782,0.042856135302566024,0.044090616684057775,0.04486065667953898,0.04521998224686552,0.045222320343893264,0.04492051529260016,0.04435333136757899,0.043551355031075295,0.04254552219533438,0.04136676877260156,0.04004603067512211,0.03861424381514134,0.03709887560968923,0.03551398999572957,0.033872524644456815,0.032187520905919545,0.030472020130166363,0.028739063667245858,0.027001692867206623,0.025272949080097258,0.02356282893343154,0.021872739699917876,0.02020579110293958,0.018565224855616055,0.0169542826710667,0.015376206262410893,0.013834237342768035,0.012331617625257522,0.010870326464280565,0.009449031305121473,0.008067316629680087,0.006724824859379676,0.005421198415643493,0.004156079719894779,0.0029291111935568098,0.0017399352580528184,0.0005878021636788956,-0.000528910548234101,-0.0016114081253904662,-0.0026608758008355373,-0.0036784988076146905,-0.004665462378773266,-0.005622951747356631,-0.00655215214641015,-0.007454317090397978,-0.008330710466279409,-0.0091824035782709,-0.010010462419435082,-0.01081595298283457,-0.011599941261531962,-0.012363493248589891,-0.013107674937070957,-0.013833535209637949,-0.01454192929711789,-0.015233598831296718,-0.015909284310223135,-0.016569726231945817,-0.017215665094513458,-0.017847841395974753,-0.018466995634378383,-0.019073846728413675,-0.019668930721135798,-0.02025269236554255,-0.020825575776279026,-0.021388025067990278,-0.0219404843553214,-0.02248339775291746,-0.023017209375423533,-0.023542327607472595,-0.024059027049544362,-0.02456757433381546,-0.025068237215820646 +0.,0.00838022255173301,0.015879657991001998,0.02229012715368458,0.027712159170650265,0.03224291129606777,0.035964890207615424,0.038958160146558174,0.04130278535416097,0.04307369097657423,0.04432634746287944,0.045114278630882104,0.04549112395290591,0.04551052290127451,0.04522523471597117,0.04467397995602066,0.043887329045328084,0.042896201395845095,0.0417315164195233,0.04042419352831433,0.03900515213416981,0.03750185235685051,0.03592837954152341,0.034297689945706505,0.03262284314220532,0.030916898703825448,0.029192916203372426,0.02746395521365182,0.025743075307469204,0.024040292998289695,0.022357038802208764,0.020696443625416713,0.01906177027452389,0.017456281556140627,0.015883240276877257,0.01434590924334412,0.012847551262151554,0.011390164613340381,0.009972429917333573,0.008593946352666092,0.00725437113608389,0.0059533614843329035,0.0046905746141590555,0.003465667742308307,0.0022782980855265726,0.0011277286438204873,0.000012344467952326264,-0.0010690505870983124,-0.0021176325561709994,-0.003134577474105344,-0.00412106137574092,-0.005078260295917327,-0.006007350269474166,-0.006909576992305013,-0.007786199974626129,-0.008638284991077213,-0.00946689243952862,-0.010273082717850695,-0.01105791622391377,-0.011822453355588203,-0.012567754510744326,-0.013294863879297755,-0.014004634147067513,-0.014697803558952035,-0.015375109181855405,-0.016037288082681688,-0.016685077328334963,-0.017319213985719308,-0.01794043512173879,-0.018549456849340017,-0.01914681379107377,-0.019732948608605805,-0.020308303293247783,-0.020873319836311337,-0.021428440229108135,-0.021974106462949818,-0.022510760529148043,-0.0230388092278432,-0.02355852647312522,-0.024070177382536518,-0.024574028165594685 
+0.,0.008448173594723687,0.015959316463712056,0.0223811354903409,0.027815635974454002,0.03236005581123194,0.03609656603652708,0.039105027936071526,0.04146530279559743,0.04325212164358694,0.04452081868109657,0.04532480466245382,0.045717606053785755,0.045752749321219435,0.0454828834844589,0.04494666583456907,0.04417463382863047,0.04319767321522489,0.042046669742934105,0.04075250916033988,0.039346077216024,0.03785481095387214,0.03629280718073128,0.03467303165182429,0.033008553038929,0.03131244001382331,0.02959776124828507,0.027877585414092153,0.026164981183022438,0.024469977451461265,0.022794025254400833,0.02114027322487575,0.019512001739425664,0.017912491174590216,0.016345021906909032,0.014812874312921763,0.013319328769168044,0.011866399832209476,0.010452781004809376,0.009078085286749981,0.007741983775378709,0.00644414756804297,0.005184247762090157,0.0039619554548677004,0.0027769417437229884,0.0016284818381669377,0.0005149686605524311,-0.0005647848093139781,-0.0016119454041471552,-0.002627679956662005,-0.0036131552995733985,-0.00456953826559623,-0.005497995687445393,-0.006399765298557734,-0.007276101824848858,-0.00812806529322487,-0.008956710295966983,-0.009763091425356394,-0.010548263273674292,-0.011313280433201886,-0.012059197496220362,-0.012787053695285249,-0.013497698861673804,-0.014191867665696488,-0.014870293559156159,-0.015533709993855642,-0.016182850421597793,-0.016818448294185454,-0.017441237063421454,-0.01805192982238187,-0.018651059566556345,-0.019239066755114536,-0.01981639114737424,-0.02038347250265323,-0.020940750580269302,-0.02148866513954024,-0.022027655939783834,-0.02255812807420647,-0.023080354663672007,-0.023594599258114645,-0.02410112646936107 +0.,0.00850845011335272,0.016025696803212048,0.022453940793714516,0.02789651470521274,0.032450674906010564,0.036198228726558944,0.039218799697845697,0.04159201135085865,0.04339236852895164,0.04467504523983029,0.04549331446049383,0.045900564852256985,0.04595018507643445,0.045694689503870234,0.04517265314858269,0.04441456405586156,0.04345125764119105,0.04231356932005534,0.04103233450793859,0.039638388620325,0.03815913029938229,0.036608658157789,0.034999938632901025,0.03334604064587607,0.0316600331178719,0.02995498497004625,0.028243965123556845,0.02654004249956145,0.024853251104888924,0.023185059865693497,0.021538631797987234,0.019917261438527033,0.01832424332406977,0.016762871991372337,0.015236441977191617,0.013748247818284499,0.01230031778852914,0.010891358001631792,0.009520994437178142,0.00818891119336469,0.006894792368387934,0.005638322060444348,0.004419184367730447,0.0032370633884427026,0.0020912460256650987,0.0009801334319522895,-0.00009745226150127966,-0.0011426686764551344,-0.0021566734346688396,-0.003140624157901923,-0.00409567846791394,-0.005022993986464445,-0.005923800339859427,-0.006799347070345577,-0.007650688242572056,-0.008478872436254498,-0.009284948231108524,-0.010069964206849748,-0.010834968943193801,-0.011581011019856296,-0.012309124452370382,-0.0130201559149047,-0.013714836319643615,-0.01439389532339846,-0.015058062582980548,-0.015708067755201213,-0.016344640496871784,-0.016968510464803566,-0.017580387522838452,-0.018180802837648766,-0.018770194557517345,-0.019349000103825835,-0.019917656897955858,-0.02047660236128906,-0.021026273915207068,-0.02156710898109153,-0.022099510825531528,-0.022623751671647166,-0.023140093450618772,-0.023648799126727866 
+0.,0.00856099860320326,0.016078921983791652,0.02250882710455469,0.02795522479939285,0.03251532878027437,0.036270555817484534,0.039300257950427374,0.041683787218506935,0.043495391188567815,0.04479006043504579,0.045620906161985296,0.04604115514802638,0.046104034171809034,0.04586189923514096,0.045353224608510136,0.044608432964407085,0.043658293209386995,0.04253357425000502,0.04126504499281636,0.03988347434437616,0.03841620767306908,0.03687733603379884,0.03527981800418788,0.033636714180002004,0.031961085157007095,0.030265991530968994,0.02856449389765353,0.026869652852826578,0.025191500472365513,0.023531521042826485,0.02189288873168438,0.02027890894031133,0.01869288707007948,0.017138128522360952,0.015617938698527891,0.014135622999952437,0.012693220948589384,0.011289451018815105,0.009923951362587983,0.008596418234251689,0.007306547888149891,0.006054036578626238,0.0048385805600244134,0.003659876086688058,0.0025172212635541273,0.0014090258281940398,0.000334121094193977,-0.00070864133547364,-0.0017204098578364282,-0.002702332869921968,-0.003655558768757866,-0.004581235951371731,-0.005480585791532271,-0.006354852256792349,-0.007205083238626346,-0.008032321100594476,-0.008837608206256948,-0.009621986919173953,-0.010386499602905708,-0.01113218862101241,-0.01186008251732422,-0.012571024513398596,-0.013265741583880213,-0.013944959414722805,-0.01460940369188009,-0.0152598001013058,-0.015896874328953665,-0.0165213520607774,-0.017133939726798497,-0.01773516646798607,-0.018325468016073033,-0.018905279351231343,-0.019475035453632925,-0.02003517130344974,-0.020586121880853712,-0.021128322166016796,-0.021662173482001865,-0.02218794705205072,-0.022705903139184396,-0.02321630301201172 +0.,0.00860578371510127,0.01611913381315106,0.02254609715161091,0.027992214209727556,0.03255459608541949,0.03631424327139571,0.039350203631378144,0.0417415250290887,0.04356216762881733,0.044866916029766626,0.045708696383849126,0.046140550228250726,0.046215519100157275,0.04598577761893816,0.04548968138698841,0.04475757222718619,0.043820136855493755,0.042708061987873344,0.04145203434028719,0.040082740628697544,0.03862745853018167,0.037100262471903636,0.0355140949040792,0.03388199979758811,0.03221702112331014,0.030532202852125042,0.02884058895491257,0.027155223402552497,0.025486129530034967,0.023834804550676318,0.022204430664431424,0.020598320955978636,0.019019788509996333,0.01747214641116287,0.015958707744156603,0.014482785593655916,0.013046428350409717,0.011648366620032762,0.01028824995348054,0.008965785951646094,0.00768068221542247,0.006432646345702689,0.005221385943379812,0.004046608609346854,0.0029076231828090225,0.001802848337141916,0.0007311246037885349,-0.00030868717079754124,-0.0013177261401627707,-0.0022971314578535736,-0.003248042277416399,-0.0041715977523976945,-0.005069010857943374,-0.005941519604367769,-0.006790165502630412,-0.007615984499899806,-0.00842001254334445,-0.009203285580132826,-0.009966839557433441,-0.010711710422414778,-0.011438920997530319,-0.012149310763371567,-0.012843602581715484,-0.013522517995773465,-0.014186778548756893,-0.014837105783877168,-0.015474221244345682,-0.01609884647337382,-0.016711684267362043,-0.017313261549620703,-0.017904011532350817,-0.018484366653859975,-0.01905475935245574,-0.0196156220664457,-0.020167387234137423,-0.020710487293838495,-0.021245321510961725,-0.02177216000917446,-0.022291261335587464,-0.022802885016646107 
+0.,0.008642788998626895,0.01614649350419098,0.022566072836996368,0.028007949818228243,0.03256907426508936,0.03633000574622769,0.039369456308614395,0.041766137999220604,0.04359369440837109,0.04490668230860737,0.04575782023489552,0.04619994184127971,0.0462858807818041,0.04606760798484238,0.045583343001242456,0.04486333181165864,0.04393816375396855,0.04283842816604977,0.04159471438577991,0.040237611751036566,0.03879431628511136,0.03727887701230852,0.035704212262275825,0.034083341357078016,0.03242928361877982,0.030755058369445996,0.029073684931141264,0.02739818262593037,0.0257385594696691,0.024096323265979228,0.022474661240948938,0.0208768910956537,0.01930633053116899,0.01776629724857028,0.016260108948933042,0.014791083333332749,0.013361275371924384,0.011969427592679422,0.010615200206246653,0.009298311385602059,0.008018479303721626,0.006775422133581321,0.005568858048157143,0.0043985052204250485,0.0032636827803204033,0.002162818683563861,0.0010947627193943496,0.00005838500148878825,-0.0009474443564759427,-0.001923855240822925,-0.002871977537875267,-0.003792941133956078,-0.00468795045887563,-0.005558237191768808,-0.006404836259289727,-0.007228776995299337,-0.008031088733658591,-0.008812800808228414,-0.009574942552869769,-0.010318543301443583,-0.011044619910380284,-0.01175400783817132,-0.012447425662442974,-0.013125590615716819,-0.013789219930514414,-0.014439030839357332,-0.015075740574767144,-0.015700066369265405,-0.016312707190620137,-0.016914187557509733,-0.01750493806225489,-0.018085388503212247,-0.01865596867873843,-0.01921710838719009,-0.01976923742692384,-0.02031278559629633,-0.020848149991738143,-0.02137559954013314,-0.021895391026500653,-0.022407782190179838 +0.,0.008672017604120317,0.016161182220685518,0.022569095698399592,0.028002917829714178,0.032559379880720093,0.03631857685720779,0.03935885438223079,0.04175855807884259,0.04359098673618736,0.04491044813129262,0.04576943132931854,0.046220540174272295,0.046316378510161064,0.046108691968428,0.04563554720506172,0.04492707984986484,0.04401376716906484,0.0429260864288892,0.04169451489556541,0.040349529835320946,0.03891823211049246,0.03741463686326936,0.035851630584243364,0.034242200198431064,0.032599332630849076,0.030936014806514013,0.029265233650442472,0.027599976087651065,0.025950228468394676,0.02431750694733201,0.022705000883008673,0.02111602964042451,0.019553912584579378,0.018021969080473117,0.016523518493105578,0.015061880187476614,0.013639113513538317,0.01225397273298827,0.010906128010929673,0.009595307353089262,0.008321238765193764,0.007083650252969891,0.005882269822144389,0.004716825478443961,0.003586646222941018,0.002490169635786373,0.0014262548084358873,0.00039378115053977656,-0.0006083719282517805,-0.0015813250182885679,-0.0025261987099203975,-0.0034441135934970804,-0.004336265406203974,-0.005203879130772019,-0.006047982909279931,-0.006869599268652375,-0.007669750735814014,-0.00844945983768949,-0.009209749101203474,-0.00995164105328061,-0.010676146344725268,-0.01138409613807893,-0.012076204559537338,-0.012753184366876707,-0.013415748317873236,-0.01406460917030314,-0.014700479681942628,-0.0153240726105679,-0.015936082905091378,-0.01653703249761287,-0.017127349262809403,-0.017707460263516776,-0.018277792562570772,-0.018838773222807206,-0.019390829307061857,-0.019934387878170527,-0.020469843755042974,-0.02099746457309876,-0.021517505310571377,-0.022030221876212314 
+0.,0.00869349296314942,0.016163401588576713,0.02255552733892707,0.027977624136883008,0.03252614891106507,0.0362807094139326,0.039319255263120446,0.041719736076263454,0.043555078548322385,0.04487932096594802,0.045744701780852125,0.046203573812286944,0.046308289879504715,0.04611034941119105,0.04564764986422872,0.04495020249249307,0.04404835829042557,0.042972468252467706,0.04175288337306098,0.04041995464664687,0.03900067472276987,0.037509016678958194,0.035957827722937805,0.03436005491011416,0.03272864529589274,0.031076545935679024,0.02941670388487849,0.027762066198896606,0.02612259144765706,0.02449980199470447,0.02289688655000229,0.02131716330440514,0.019763950448767647,0.018240566173944425,0.016750328670790102,0.015296556130159294,0.013881310171804887,0.012503356622465642,0.011162374930495118,0.009858102230151558,0.0085902756556932,0.007358632341378268,0.006162909421465015,0.0050028440302116554,0.00387777464406616,0.0027861488050413834,0.001726834955715929,0.0006987218354802154,-0.0002993018162753747,-0.001268347260160423,-0.0022095257567845373,-0.0031239485667573256,-0.004012802586822937,-0.004877305746900504,-0.005718479207714532,-0.006537338498878321,-0.007334899150005173,-0.008112176690708366,-0.00887018665060121,-0.009609944559296982,-0.010332454627425701,-0.011038543455066325,-0.011728920553663973,-0.01240429404603958,-0.013065372055014055,-0.013712862703408329,-0.014347474114043328,-0.014969914409739963,-0.01558087433511457,-0.016180873058804144,-0.016770335642614567,-0.017349686320755665,-0.017919349327437252,-0.018479748896869166,-0.01903130926326122,-0.01957445466082325,-0.02010957752519608,-0.020636944108215125,-0.0211568075380395,-0.02166942185072847 +0.,0.008707259366087508,0.016153374206226728,0.022525749856046686,0.027932594660460705,0.03247003701197959,0.03621717561240786,0.03925153550571188,0.04165064173585802,0.04348702253299119,0.044814426866476235,0.04568482213728811,0.046150289634546024,0.04626291064736918,0.04607391819321356,0.04562102476323735,0.04493410369314703,0.04404336599813411,0.04297902269339011,0.041771284794106536,0.040450363315474906,0.039043130097480366,0.037563508267729216,0.03602429858177721,0.03443840102832395,0.03281871559606904,0.031178142273712097,0.02952958104995273,0.027885931913490543,0.02625711977072425,0.024644671148902446,0.023051771440891933,0.021481734939624828,0.019937875938033256,0.01842350872904932,0.01694194760560515,0.015496506860632864,0.014089248363079714,0.012718949355676426,0.011385297932803885,0.010088039688099583,0.008826920215201003,0.007601685107745614,0.006412079959370917,0.005257850363714374,0.004138343900542692,0.0030520184063215997,0.001997751728142369,0.0009744419739244623,-0.000019012748412694774,-0.000983714330949641,-0.0019207646657669413,-0.0028312656449451597,-0.0037163951764921394,-0.004577363790014155,-0.005415185471562641,-0.006230868566283211,-0.0070254214193214756,-0.007799852375823026,-0.008555169780933479,-0.009292381979798425,-0.010012486513387372,-0.01071630516018799,-0.011404542657488921,-0.012077902336743059,-0.012737087529403274,-0.01338280156692246,-0.014015747780753504,-0.01463662950234928,-0.01524613309136309,-0.015844774781178156,-0.01643297672798643,-0.0170111602466776,-0.017579746652141327,-0.01813915725926731,-0.018689813382945206,-0.01923213633806471,-0.019766516074036337,-0.020293217369585753,-0.020812491460832278,-0.02132459047032909 
+0.,0.008713382598485689,0.016131344083261924,0.022480166203509656,0.027868375654338116,0.03239171976084155,0.036128767220695295,0.03915659093802525,0.0415522638169573,0.043387890163467537,0.04471691046405633,0.04559100133481252,0.046061952735510704,0.046181554625925506,0.046000754098113966,0.045557063447072235,0.0448802050300232,0.04400023666710846,0.0429472161784696,0.041751201384248204,0.04044225010458586,0.03904710122771337,0.03757962034374978,0.03605255486095559,0.034478750779343224,0.032871054098925136,0.031242310819713764,0.02960536694172155,0.027973068464960935,0.02635530098031441,0.024753593230305147,0.023171124734535392,0.021611203278351247,0.020077136647098825,0.018572232626124202,0.017099799000773495,0.0156631435563928,0.01426432647963656,0.012902136299501394,0.011576269153077056,0.010286478459225352,0.009032517636808072,0.007814140104686992,0.006631099281723921,0.005483148586780629,0.004369644354453933,0.0032890550432398955,0.0022402679626109374,0.0012221906328256362,0.0002337305741425365,-0.0007262046931797857,-0.0016587076488827777,-0.002564870772707887,-0.003445862835198116,-0.004302886627127701,-0.005136948769984602,-0.005949050240474486,-0.006740192015303013,-0.007511375071175829,-0.0082636003847986,-0.008997868932876971,-0.00971517136229768,-0.010416324378245915,-0.011102027788315727,-0.011772979979927435,-0.012429879340501333,-0.013073424257457729,-0.013704313118216935,-0.014323244310199247,-0.014930899632186658,-0.01552779221563081,-0.01611434122080048,-0.01669096495492855,-0.017258081725247888,-0.017816109838991383,-0.01836546760339191,-0.01890657332568235,-0.01943981436885746,-0.019965453951622643,-0.02048374137733682,-0.020994926815442603 +0.,0.008711950411035122,0.016097577124505758,0.02241920059839967,0.027785534025922008,0.03229189290406208,0.0360162957634791,0.03903533679079522,0.041425610172632535,0.043258771730208416,0.04458793495703579,0.045464466649989245,0.04593984634314011,0.046065553570559734,0.04589223067579961,0.045457175061434735,0.044789945526607505,0.04392043397104563,0.04287853229447682,0.04169413239662879,0.04039712617722924,0.039014107884385946,0.0375588782810283,0.03604412480669527,0.03448263282533695,0.03288718770090348,0.03127057479734497,0.029645579478611555,0.028024987108653355,0.02641863854158528,0.024828062883255418,0.023256431335936568,0.021707042681594332,0.020183195702194304,0.018688189179702056,0.017225321896083196,0.015797892633303313,0.014407958053034813,0.013054317859816152,0.011736675663936762,0.010454792110235908,0.00920842784355285,0.00799734350872683,0.0068212997505971255,0.005680057214002972,0.004572980663300255,0.0034985495014082206,0.0024556605625197674,0.001443230828060821,0.00046017727945727534,-0.0004945831018649456,-0.00142213333447994,-0.002323556436961807,-0.00320001189327339,-0.004052694425892179,-0.00488260310524676,-0.005690731358777571,-0.006478072613925046,-0.007245620298129601,-0.007994367838831678,-0.008725308663471692,-0.009439426305495716,-0.010137532152526168,-0.01082032093074256,-0.011488485934564136,-0.012142720458410116,-0.012783717796699741,-0.013412171243852244,-0.014028774094286853,-0.014634203414749629,-0.015228969073218773,-0.01581348714610939,-0.0163881728469612,-0.0169534413893139,-0.01750970798670722,-0.01805738785268086,-0.01859689620077454,-0.019128617710151244,-0.019652813955232332,-0.02016973226705498,-0.020679620823467 
+0.,0.008703073165316695,0.016052361463752093,0.022343298783127078,0.027684657559708538,0.0321712725279619,0.035880592639156265,0.03888870776193701,0.04127170776494945,0.04310077631193432,0.04442868204242338,0.04530646359579963,0.04578527168317617,0.04591625701566613,0.04574973905268157,0.04532278614061476,0.044664781420317844,0.0438054386346813,0.0427744715265956,0.0416015938389512,0.04031651931463856,0.03894568632611868,0.03750282381731922,0.0360005529106964,0.034451591960707406,0.03286865932180943,0.031264473348459665,0.029651752395115298,0.02804321481623353,0.026448651538048012,0.024869590273911914,0.023309191576495213,0.02177074284214456,0.020257531467206602,0.018772844848027964,0.017319970380955306,0.015902195462335275,0.014521571474601487,0.01317690920579611,0.01186791920358894,0.010594368774343443,0.009356025224423083,0.0081526558601913,0.006984027988011571,0.005849908914247337,0.004749671531204308,0.0036818065032884896,0.002645220256209864,0.0016388392863979572,0.0006615900902822659,-0.0002876008357076853,-0.0012098069951423927,-0.0021061018915923537,-0.0029776355729784097,-0.003825594373119647,-0.004650969628250681,-0.005454747038832931,-0.0062379123053278125,-0.007001451128196726,-0.007746349207901094,-0.008473592244902316,-0.009184156444903871,-0.00987884764116885,-0.010558355330511327,-0.011223367569026038,-0.011874572412807692,-0.012512657917951019,-0.01313831214055074,-0.013752223136701573,-0.014355063074668125,-0.01494733840255184,-0.015529462027310082,-0.016101845984999214,-0.01666490231167559,-0.017219043043395568,-0.01776468021621551,-0.018302225866191773,-0.018832061893988613,-0.019354448148131108,-0.01986962994885906,-0.020377853444985626 +0.,0.008686884289014686,0.01599600785342249,0.022252928342225472,0.027566355163627973,0.03203059524171143,0.035722509245329326,0.038717658088753625,0.04109160268625631,0.04291503174929431,0.044240351848978496,0.045118255819897174,0.045599547846858865,0.04573503211467209,0.04557468774572773,0.04515534039884144,0.044506185934689454,0.043656748190108494,0.04263655100193524,0.04147511820700638,0.04020197364215863,0.03884338901965388,0.03741301476920747,0.035923399620515294,0.034387188818692634,0.03281702760885479,0.031225561236117066,0.029625434945594737,0.028029293982403117,0.02644687438243758,0.024879700804363386,0.023330920930694766,0.02180380850348196,0.020301637264775206,0.018827680956624737,0.017385213321080808,0.015977508100193657,0.014606609730703051,0.01327134000946419,0.011971415919334732,0.01070661089852113,0.00947669838522982,0.008281451817667228,0.007120644634039808,0.005994050272553979,0.004901049474710215,0.003840144477741632,0.0028102513701262245,0.0018103062221508715,0.0008392451041024239,-0.00010399591373223919,-0.0010204807610662595,-0.001911273367612781,-0.0027775141954917773,-0.003620380878700345,-0.0044408568394654745,-0.005239919876622784,-0.0060185477890078885,-0.006777718375456387,-0.0075184094348039006,-0.008241598765886027,-0.008948255037418158,-0.009639178299011349,-0.0103150526738736,-0.010976560838034879,-0.011624385467525134,-0.01225920923837432,-0.0128817148266124,-0.01349258490826932,-0.014092486591391139,-0.014681922752980166,-0.015261303047194731,-0.0158310362509968,-0.01639153114134833,-0.016943196495211286,-0.017486441089547625,-0.018021673701319317,-0.01854927336077637,-0.019069498111644212,-0.01958259122587488,-0.020088796786746025 
+0.,0.008663540802116341,0.015928849990561513,0.022148578963521452,0.0274312570813529,0.03187061833254622,0.03554291708037034,0.038523161602985724,0.04088636017855283,0.04270268463527072,0.0440241628937807,0.044901125025883566,0.045384011679270475,0.0455232635016326,0.045368502506252045,0.044956298558997555,0.04431564909385003,0.0434758767776515,0.04246630427724398,0.041316254259469484,0.04005504939117002,0.03870878439091212,0.03729102477783337,0.03581424108400356,0.0342909996169241,0.032733866684096426,0.031155408593021974,0.02956819165120218,0.027984782166138485,0.02641485655049697,0.024859934844135633,0.02332314974943019,0.02180775919768098,0.02031702112018836,0.018854193448252666,0.01742253411317425,0.01602530104625347,0.01466453015994319,0.013339054204645458,0.01204859612950455,0.010792935009239741,0.009571849918570302,0.008385119932215485,0.007232524124894577,0.0061138415713268245,0.0050284606064304,0.003974895346370328,0.002952071618046294,0.001958935129402,0.0009944315883811203,0.000057506702927356406,-0.0008528938190156101,-0.0017378242695041003,-0.002598415374811899,-0.003435835767043585,-0.00425106077790342,-0.005045060133002446,-0.005818803557951703,-0.006573260778362211,-0.007309401519845017,-0.008028195508011142,-0.008730603670352078,-0.00941742005095383,-0.010089323258853821,-0.010746990451827797,-0.011391098787651485,-0.012022325424100618,-0.012641347518950942,-0.013248842229978179,-0.0138454714479091,-0.014431734332491845,-0.015008037203975417,-0.015574785500766344,-0.016132384661271135,-0.016681240123896324,-0.017221757327048424,-0.017754341709133956,-0.018279369340741352,-0.01879709638447157,-0.019307764027472786,-0.019811614251918172 +0.,0.00863322370038704,0.015851244840780827,0.02203076269168276,0.027280015079617408,0.031692119894952166,0.03534270781887612,0.038306211753970705,0.04065706460281739,0.04246490023273114,0.04378135195964655,0.04465637081898181,0.0451400175980626,0.04528235308421461,0.04513262608565401,0.044727138094432975,0.04409467744490005,0.043264354853052994,0.042265281034889544,0.04112656670640744,0.03987732258360444,0.03854345650697191,0.03713844298655615,0.03567466882183235,0.03416461582507141,0.032620765808544235,0.03105560058452171,0.029481601965274752,0.027911251763074258,0.026354162262756727,0.02481184741797324,0.02328742295117083,0.02178412893831999,0.020305205455391225,0.018853892578355028,0.0174334303831819,0.016047058945842345,0.014696804163187594,0.013381509702605182,0.012100904043769204,0.010854771436802762,0.009642896131828949,0.008465062378970845,0.0073210544283515595,0.006210656530094166,0.0051332643795369785,0.004087404272436264,0.0030720118541525537,0.0020860425390195917,0.0011284517413710931,0.0001981948755408018,-0.0007057726441375593,-0.0015844954033302698,-0.0024390942421705303,-0.0032707284979294235,-0.004080365238532739,-0.0048689659477749395,-0.0056374921094504826,-0.0063869052073538095,-0.007118166725279386,-0.007832238147021652,-0.00853007245960532,-0.009212457487138557,-0.009880066187542913,-0.010533570065642605,-0.011173640626261822,-0.011800949374224774,-0.012416167814355672,-0.013019967451478704,-0.013613004806507458,-0.014195775180849099,-0.01476868148190478,-0.015332125732153368,-0.015886509954073712,-0.01643223617014468,-0.016969706402845123,-0.017499322674653907,-0.01802145800989859,-0.018536366616321624,-0.019044287560644354,-0.019545460689214224 
+0.,0.008596138344988609,0.015763572920268,0.02190001414450795,0.027113302607634327,0.031495898933685915,0.035122793361109884,0.038067821609021536,0.040404819396536215,0.0422028623944561,0.0435131739792652,0.044385310556727196,0.0448689374141173,0.04501371983871071,0.044868518009801926,0.04446935298559628,0.04384479379897655,0.043023728914999976,0.04203504679872378,0.04090763591520519,0.03967038472950144,0.038349004767138185,0.03695687372803521,0.03550628941259594,0.03400964384945264,0.03247932906723758,0.03092773709458302,0.029367259960121246,0.02781028969248454,0.026266370171633684,0.02473700789474498,0.023225299714029816,0.021734465916671254,0.02026772678985238,0.018828302620756264,0.01741941369656598,0.016044280304464604,0.014704916921239016,0.013400178113601904,0.012129797488910337,0.010893564045550964,0.009691266781910427,0.00852269469637536,0.007387636787332418,0.00628588205316823,0.005216833339258078,0.004179029416208855,0.0031714158322330476,0.0021929577816836537,0.0012426204589136376,0.0003193690582759961,-0.0005778312258762973,-0.0014500151991902704,-0.0022982936651371045,-0.00312381638230943,-0.003927541984823916,-0.004710423549039492,-0.005473414151315087,-0.006217466868009609,-0.006943534775481994,-0.007652570950091152,-0.008345520244109924,-0.009023164054590424,-0.00968616955496186,-0.01033520246584132,-0.01097092850784588,-0.011594013401592626,-0.012205122867698643,-0.012804922626781004,-0.013394063682002431,-0.0139730373403597,-0.01454224301958883,-0.015102079250907133,-0.0156529445655319,-0.016195237494680444,-0.01672935656957006,-0.01725570032141805,-0.017774638644292542,-0.0182864237198884,-0.018791292459716315,-0.019289482540370377 +0.,0.008552514814726929,0.015666238562788874,0.021756890717543047,0.026931814944345676,0.031282775459554275,0.03488410588297246,0.037809023858711356,0.04013074703088214,0.04191777347261134,0.04322090190940786,0.04408927919653874,0.044572160156852184,0.044718799613196386,0.04457765435702131,0.0441844534760241,0.043567536969851155,0.042755561230571254,0.04177718265025316,0.04066105762096565,0.03943584253477749,0.03812704360906891,0.036747936226459214,0.03531072418997832,0.03382770472519236,0.03231117505766747,0.03077343241296979,0.02922677401666545,0.0276834970943206,0.026153073068373774,0.024636999700367222,0.023138353191630552,0.02166033221859818,0.020206135457704527,0.018778961585384012,0.01738200927807106,0.016018477212200087,0.014690367125733734,0.01339654448298509,0.012136747649045043,0.01091076997747807,0.009718404821848638,0.008559445535721202,0.00743368547266025,0.006340917986230226,0.005280552883766543,0.004251141699821056,0.003251639974210564,0.002281022759867313,0.0013382651097235197,0.0004223420767114316,-0.0004677712862367271,-0.0013330999261887324,-0.0021747444592634486,-0.0029938447907711364,-0.0037913509541581967,-0.00456820745564024,-0.005325358801432877,-0.006063749497751697,-0.006784324050812316,-0.0074880269668303225,-0.008175794774003806,-0.008848402242690743,-0.009506510631942168,-0.010150779750271524,-0.010781869406192229,-0.011400439408217718,-0.012007149564861428,-0.012602659684636777,-0.013187615109576301,-0.013762503021305597,-0.014327719273116291,-0.014883658831579709,-0.015430716663267167,-0.01596928773475,-0.016499767012599517,-0.017022549463387052,-0.017538001769552405,-0.018046374018224863,-0.018547900931620707,-0.01904281798329941 
+0.,0.00850260821640888,0.015559670160581912,0.021601972764280608,0.026736269321594286,0.031053590562730538,0.034627597865076444,0.03753087080231738,0.03983598894813874,0.04161085420813109,0.04290582658578659,0.04376962912528388,0.04425109188294908,0.04439904491510832,0.04426152752068718,0.04387396581309255,0.04326446149764343,0.04246142954634598,0.041493284931206346,0.040388442624230685,0.039175317597425145,0.03787920220435202,0.03651326428969047,0.03508960892992041,0.033620433798279635,0.03211793656800591,0.030594314912337026,0.02906176650451077,0.02753248901776493,0.02601587758287748,0.02451342002435463,0.02302817022285554,0.02156330353526577,0.0201219953184709,0.01870742092935648,0.01732275572480808,0.01597117506171127,0.01465466670418252,0.013372107021782048,0.012123238800526638,0.010907859390600995,0.009725766142189828,0.008576756405477826,0.007460627530649714,0.00637717686789017,0.005325821020255684,0.004305124567484874,0.0033140531341707274,0.0023515917154332236,0.0014167253063923101,0.0005084389021679654,-0.00037428250211985555,-0.0012324539113511969,-0.00206716560353245,-0.0028795473656769424,-0.0036705404667896316,-0.004441080683052589,-0.005192103790647882,-0.00592454556575756,-0.0066393417845637,-0.007337428223248349,-0.00801973290162373,-0.008687023771347877,-0.00933995605058911,-0.009979183511100817,-0.010605359924636359,-0.011219139062949116,-0.011821174697792465,-0.012412120600919768,-0.01299261631447482,-0.01356314476911301,-0.014124098180820067,-0.014675867879954157,-0.015218845196873425,-0.01575342146193604,-0.016279988005500155,-0.01679893615792393,-0.01731062931139581,-0.017815315392954664,-0.018313226901902815,-0.018804597075877182 +0.,0.00844669905197053,0.015444320319483944,0.021435863695477807,0.026527404988900827,0.030809206422427585,0.034354242043171425,0.03723443425342556,0.03952170545548322,0.041283343630381095,0.04256925659707596,0.04342772999069837,0.04390715546452395,0.04405592467182832,0.04392164596970189,0.04353943200989037,0.042937137406629286,0.04214292683700003,0.04118496497808397,0.040091416506962504,0.038890446100717,0.0376071241326334,0.036254505980928824,0.03484459353117093,0.033389480420554916,0.03190126028627601,0.030392026765529424,0.02887387349551037,0.027358894113414068,0.025856403846336216,0.02436787947138149,0.022896350986821433,0.021444968831036415,0.020016883442406815,0.018615245259312994,0.01724320472013533,0.0159039122632542,0.014599340528499611,0.013328376814313585,0.012090768023208479,0.01088631517693886,0.009714819297259305,0.008576081405924365,0.007469902524688626,0.0063960836753066365,0.0053540481122415625,0.00434237373554703,0.00336003635205523,0.0024060309875354137,0.0014793526677567988,0.0005789964184886353,-0.00029604273449985205,-0.0011467697654394362,-0.0019742644633319066,-0.0027796462410287727,-0.003563847442412312,-0.004327794956574033,-0.005072415672605442,-0.005798636479598027,-0.006507384266643303,-0.007199585922832755,-0.007876160780159312,-0.008537869786905055,-0.009185361997228931,-0.009819285024714327,-0.010440286482944603,-0.011049013985503144,-0.01164611514597333,-0.012232237577938522,-0.012808014889342748,-0.013373925639273154,-0.013930358335475915,-0.014477700602368044,-0.01501634006436654,-0.015546664345888419,-0.016069061071350678,-0.01658391786517033,-0.01709159475319318,-0.017592337439426158,-0.018086376167222508,-0.018573941905938564 
+... (remaining comma-separated rows of the numeric solution grid above omitted for brevity) ...
diff --git a/projects/pic/data/kdv/kdv_ann_pretrained.pickle b/projects/pic/data/kdv/kdv_ann_pretrained.pickle
new file mode 100644
index 0000000..8484251
Binary files /dev/null and b/projects/pic/data/kdv/kdv_ann_pretrained.pickle differ
diff --git a/projects/pic/data/ode/ode_ann_pretrained.pickle b/projects/pic/data/ode/ode_ann_pretrained.pickle
new file mode 100644
index 0000000..1e7f603
Binary files /dev/null and b/projects/pic/data/ode/ode_ann_pretrained.pickle differ
diff --git a/projects/pic/data/ode/ode_data.npy b/projects/pic/data/ode/ode_data.npy
new file mode 100644
index 0000000..544dd8e
Binary files /dev/null and b/projects/pic/data/ode/ode_data.npy differ
diff --git a/projects/pic/data/vdp/vdp_ann_pretrained.pickle b/projects/pic/data/vdp/vdp_ann_pretrained.pickle
new file mode 100644
index 0000000..c17f73b
Binary files /dev/null and b/projects/pic/data/vdp/vdp_ann_pretrained.pickle differ
diff --git a/projects/pic/data/vdp/vdp_data.npy b/projects/pic/data/vdp/vdp_data.npy
new file mode 100644
index 0000000..05a824c
Binary files /dev/null and b/projects/pic/data/vdp/vdp_data.npy differ
diff --git a/projects/pic/data/wave/ann_pretrained.pickle b/projects/pic/data/wave/ann_pretrained.pickle
new file mode 100644
index 0000000..7e56ad9
Binary files /dev/null and b/projects/pic/data/wave/ann_pretrained.pickle differ
diff --git a/projects/pic/data/wave/wave_sln_80.csv b/projects/pic/data/wave/wave_sln_80.csv
new file mode 100644
index 0000000..8271838
--- /dev/null
+++ b/projects/pic/data/wave/wave_sln_80.csv
@@ -0,0 +1,81 @@
+... (comma-separated rows of the 81-line numerical wave-equation solution grid omitted for brevity) ...
+6.054990892309186,6.062171987960312,6.068846373458553,6.074924193054724,6.08040468513346,6.0852876549355805,6.089572597861914,6.0932594387923755,6.096348309877871,6.098839233254681,6.100731696899468,6.102025376094573,6.102720855038943,6.102819103851601,6.102320443085623,6.101224942790037,6.0995330111832695,6.097245837491829,6.09436462033158,6.090889968386968,6.086822422625521,6.082163068688218,6.076913713224238,6.071075794286422,6.064650280288112,6.057638270159854,6.050041578430062,6.041862469151194,6.033102623394494,6.0237634333763115,6.013846621773028,6.003354722792798,5.992290319818323,5.9806553866826535,5.9684518062117915,5.955681991946158,5.942349109366966,5.928455986077347,5.914004953317839,5.898998452265974,5.883439568924632,5.867331462348183,5.850677280380932,5.833480440525525,5.8157444512847984,5.797472728353374,5.778668612160909,5.759335691565452,5.739477779614464,5.7190988118789905,5.698202748214973,5.6767935478849365,5.654875328163035,5.632452323009045,5.6095288650774995,5.5861093880205095,5.56219842856414,5.537800624517414,5.512920710837453,5.48756352110624,5.461733989939065,5.43543715534837,5.4086781584492245,5.381462238532358,5.353794733394812,5.325681081851452,5.297126826254068,5.268137612914368,5.238719187749656,5.208877396578275,5.178618187197131,5.147947611287812,5.11687182292952,5.085397075505846,5.053529723857979,5.021276227133847,4.98864314773974,4.955637149082716,4.9222649970634285,4.888533560204068,4.854449808888537 +6.12437432937036,6.131635296512254,6.138380586834161,6.1445187541124335,6.15004901629234,6.154971173275318,6.159284698919381,6.162989514412806,6.166085738348761,6.168573379435024,6.170451907201967,6.1717209791630205,6.172381168632415,6.172433435183821,6.171878086162276,6.170715178394318,6.168945106853015,6.166569047466736,6.163588185553098,6.160003116538194,6.155814368135927,6.1510230126973156,6.145630843534211,6.13963928538704,6.133049293387422,6.125861953175871,6.118079065944705,6.109702882382342,6.1007350702333945,6.091177008405885,6.081030406247355,6.070297784586935,6.058981713426627,6.047084153256774,6.034606973566703,6.02155257452835,6.007924108210933,5.993724388827344,5.978955734256843,5.963620572310161,5.947721975587778,5.931263089739585,5.914247049206951,5.896677258075057,5.878557211426009,5.859890311542145,5.840679885452209,5.820929508596896,5.800642980584674,5.779824223539854,5.758477183878317,5.736605807438433,5.714214198055715,5.691306576216279,5.667887261081511,5.643960672809095,5.619531334640987,5.594603870880807,5.569183002960372,5.543273550923112,5.516880435828042,5.490008682104975,5.4626634172173985,5.434849866826328,5.406573355151897,5.377839307413851,5.348653252276968,5.319020822233919,5.288947749467907,5.2584398662188825,5.227503106686222,5.196143508782262,5.164367212840795,5.132180458719142,5.0995895878200255,5.066601045784187,5.033221381514396,4.9994572452714,4.965315389924918,4.9308026710992,4.895926046415914 
+6.178612328009123,6.185935758910088,6.192736539059795,6.198921985224274,6.204491299451588,6.209444277490338,6.2137803764429025,6.217499514626263,6.2206018000883185,6.22308723109218,6.224955262821059,6.226205538967971,6.226838624380668,6.226855470430053,6.226256374187772,6.2250413821959,6.223210879125031,6.220766030559514,6.217708011472859,6.214037406978448,6.209754734481191,6.204861055994505,6.199358154455357,6.193247444248874,6.186529870175512,6.1792065075380815,6.171279147154912,6.16275002931839,6.153620811406562,6.1438928619753375,6.13356788000394,6.122648375913487,6.111136909296632,6.099035430264826,6.086345797933446,6.073070402075102,6.059212384324936,6.044774548479689,6.029759202026285,6.0141687623789455,5.998006291712941,5.981274925251037,5.963977787010473,5.946118270642443,5.92769986079313,5.90872594931539,5.889199852818969,5.869125136313471,5.848505588957529,5.827345122419915,5.8056476726690445,5.783417175115578,5.7606577231561005,5.737373526794965,5.713568894689524,5.689248236496927,5.664416064981364,5.639076993951778,5.613235734302397,5.586897095509705,5.560065988047533,5.532747425748434,5.504946525412207,5.47666850202425,5.44791866915252,5.418702441329702,5.389025336437543,5.358892976049682,5.328311081466072,5.297285474141204,5.265822077435377,5.23392691821519,5.201606125715248,5.168865928942713,5.135712658509895,5.10215274905788,5.068192738359026,5.033839266017404,4.999099074240095,4.963979007999113,4.928486014426036 +6.217500063822873,6.224868294637783,6.23170889339763,6.237928293303056,6.243525685047016,6.248500861411807,6.252853267531437,6.256582819671946,6.25968961834871,6.262173654364204,6.264034372653648,6.265271407038249,6.2658853163179575,6.2658770460041096,6.265246885829964,6.2639948749913845,6.262121390799772,6.259627591450338,6.256514644527387,6.252783127777743,6.248433551242437,6.243466969550383,6.237885158227143,6.231689524260602,6.224881005071436,6.217460668577961,6.209430298187836,6.200792126766951,6.191547804288058,6.1816986919121355,6.171246481211858,6.160193675173457,6.148542825953705,6.136295876249147,6.123454677763456,6.110021612839763,6.095999815659309,6.08139208257658,6.066200713652054,6.050428118870161,6.034077352956372,6.017151543682386,5.999653807614234,5.981587530942709,5.962956190851202,5.943763171734369,5.924011782748467,5.903705581440313,5.882848349495924,5.861443991107162,5.839496434766802,5.817009608413468,5.7939875979649385,5.770434605937677,5.746354933495336,5.721752982798663,5.696633259114109,5.671000368744589,5.64485901507542,5.61821400007175,5.591070226689598,5.563432701232262,5.535306532950381,5.506696929291172,5.477609196302464,5.448048740993828,5.4180210736990615,5.387531808401047,5.356586658843724,5.325191438995556,5.29335206472898,5.261074555363347,5.2283650326149615,5.195229718046183,5.161674934863244,5.127707110305018,5.093332774744829,5.058558560590787,5.023391202887334,4.987837539488848,4.951904510476935 
+6.240891223220927,6.248286407156036,6.255150970285742,6.261390815677118,6.267005127210075,6.271993695887126,6.276355959661244,6.2800918335678935,6.283201413602864,6.285684686092192,6.287541089822292,6.288770252691259,6.289372729870238,6.289349463354649,6.288700738474621,6.287426590018223,6.285527390881232,6.283004294825159,6.279858465000568,6.27609047473421,6.271700829648659,6.266690579941927,6.261061496692431,6.254814982449461,6.247951970205636,6.2404735234483715,6.232381421138624,6.223677891686031,6.21436458061987,6.204442844663867,6.193914370946445,6.182781657992497,6.171047253496819,6.1587130957065686,6.145781031877932,6.132253439895827,6.118133449468446,6.103423852484265,6.088126944547478,6.072245131184191,6.055781462649196,6.038739062242489,6.0211210420588275,6.002930783812694,5.98417176020979,5.964847351169471,5.944960861375031,5.924515843895784,5.903516075935861,5.881965457203365,5.859867911707917,5.83722736290717,5.814047892235844,5.790333697722747,5.766089076041438,5.7413184248625875,5.716026244964905,5.690217138163243,5.663895803347811,5.637067037983548,5.60973574052503,5.581906912776504,5.5535856594893795,5.524777183604798,5.4954867866590265,5.465719871146651,5.435481942884969,5.404778611334214,5.373615585711001,5.341998675460344,5.309933791924567,5.277426949874713,5.244484266473148,5.211111958747057,5.1773163453706585,5.143103849028511,5.108480995508423,5.073454412726503,5.038030831208947,5.002217084268895,4.966020107469362 +6.248698025168768,6.256102205668344,6.2629747691189985,6.269221441877786,6.274841405553606,6.279834450555718,6.284200012443411,6.2879380058419585,6.291048525240611,6.293531555473251,6.295386533276694,6.296613084574661,6.297211763328674,6.297183510362156,6.296528609537508,6.295247094173529,6.293339335694104,6.290806486382809,6.2876497079122595,6.283869572135829,6.279466583203254,6.274441789835547,6.268796961628697,6.262533499652424,6.255652335423298,6.248154530951733,6.240041863716409,6.231316560641503,6.221980265775066,6.2120343343616575,6.201480452048202,6.190321115872413,6.178558872041654,6.166195657319771,6.153233317480231,6.139674228921662,6.125521519860936,6.1107779806975415,6.09544590555008,6.079527698458347,6.063026408186453,6.045945156543108,6.0282870541324,6.01005548117691,5.991253908890022,5.9718857156991945,5.951954204795997,5.9314629277576065,5.910415660296269,5.888816300628312,5.866668771271497,5.843976994191983,5.820745049334383,5.796977133237676,5.772677541086308,5.747850669063332,5.722501016463074,5.696633183620805,5.6702518679412055,5.6433618653996085,5.615968072963715,5.588075490960799,5.559689222678239,5.530814469579129,5.501456531704539,5.471620810052562,5.441312808958915,5.410538136423044,5.379302500175123,5.347611708136548,5.315471670121076,5.28288839938945,5.249868011572831,5.216416722140739,5.182540848190233,5.14824681080734,5.1135411341483845,5.0784304444766315,5.042921470618004,5.00702104413442,4.970736098819935 
+6.240891223220926,6.248286407156036,6.255150970285742,6.261390815677118,6.267005127210074,6.271993695887126,6.276355959661244,6.2800918335678935,6.283201413602863,6.285684686092192,6.287541089822292,6.288770252691259,6.289372729870238,6.28934946335465,6.288700738474621,6.287426590018224,6.285527390881232,6.283004294825159,6.279858465000569,6.27609047473421,6.27170082964866,6.266690579941928,6.261061496692431,6.254814982449462,6.247951970205636,6.2404735234483715,6.232381421138624,6.223677891686031,6.21436458061987,6.204442844663866,6.193914370946445,6.182781657992497,6.171047253496818,6.158713095706568,6.145781031877931,6.132253439895827,6.118133449468446,6.103423852484264,6.0881269445474775,6.072245131184191,6.055781462649196,6.038739062242488,6.021121042058827,6.002930783812693,5.984171760209789,5.96484735116947,5.94496086137503,5.924515843895783,5.903516075935861,5.881965457203364,5.859867911707916,5.837227362907169,5.814047892235843,5.790333697722746,5.766089076041437,5.741318424862587,5.716026244964904,5.690217138163243,5.66389580334781,5.637067037983547,5.60973574052503,5.581906912776503,5.5535856594893795,5.524777183604797,5.495486786659026,5.465719871146651,5.435481942884969,5.404778611334214,5.373615585711,5.341998675460344,5.309933791924566,5.277426949874712,5.244484266473148,5.211111958747056,5.1773163453706585,5.14310384902851,5.1084809955084225,5.073454412726502,5.038030831208947,5.002217084268894,4.966020107469362 +6.217500063822873,6.224868294637783,6.231708893397629,6.237928293303055,6.2435256850470155,6.248500861411806,6.252853267531437,6.256582819671945,6.25968961834871,6.262173654364205,6.264034372653648,6.26527140703825,6.2658853163179575,6.2658770460041096,6.265246885829965,6.263994874991385,6.262121390799773,6.259627591450338,6.256514644527387,6.252783127777744,6.248433551242437,6.243466969550384,6.237885158227144,6.231689524260602,6.2248810050714365,6.21746066857796,6.209430298187836,6.200792126766951,6.191547804288058,6.181698691912135,6.171246481211857,6.160193675173456,6.1485428259537045,6.136295876249147,6.123454677763455,6.110021612839762,6.095999815659308,6.0813920825765795,6.066200713652052,6.05042811887016,6.03407735295637,6.017151543682385,5.999653807614233,5.981587530942707,5.9629561908512,5.943763171734367,5.924011782748465,5.903705581440311,5.882848349495922,5.86144399110716,5.839496434766801,5.817009608413467,5.793987597964938,5.770434605937676,5.7463549334953345,5.721752982798662,5.696633259114107,5.671000368744587,5.6448590150754185,5.618214000071749,5.591070226689597,5.563432701232261,5.535306532950379,5.506696929291172,5.477609196302463,5.448048740993827,5.4180210736990615,5.387531808401046,5.356586658843723,5.325191438995554,5.293352064728978,5.261074555363345,5.228365032614961,5.195229718046181,5.161674934863243,5.127707110305017,5.093332774744828,5.058558560590785,5.023391202887333,4.987837539488846,4.951904510476934 
+6.178612328009122,6.185935758910087,6.192736539059795,6.198921985224274,6.204491299451588,6.209444277490338,6.213780376442902,6.217499514626263,6.2206018000883185,6.2230872310921805,6.224955262821059,6.226205538967971,6.226838624380669,6.226855470430054,6.226256374187773,6.225041382195901,6.223210879125032,6.220766030559515,6.217708011472859,6.2140374069784485,6.209754734481191,6.204861055994506,6.199358154455358,6.1932474442488745,6.186529870175513,6.1792065075380815,6.171279147154912,6.1627500293183894,6.153620811406561,6.143892861975336,6.133567880003939,6.122648375913486,6.111136909296631,6.099035430264825,6.086345797933445,6.073070402075101,6.059212384324935,6.044774548479688,6.029759202026283,6.014168762378943,5.998006291712939,5.981274925251034,5.963977787010471,5.946118270642441,5.927699860793127,5.9087259493153885,5.889199852818967,5.869125136313469,5.8485055889575275,5.8273451224199135,5.805647672669043,5.783417175115576,5.760657723156099,5.737373526794963,5.713568894689522,5.689248236496925,5.664416064981363,5.639076993951776,5.613235734302395,5.586897095509703,5.560065988047532,5.532747425748433,5.504946525412205,5.476668502024249,5.447918669152518,5.418702441329701,5.389025336437542,5.358892976049681,5.328311081466071,5.297285474141202,5.2658220774353754,5.233926918215189,5.201606125715246,5.1688659289427115,5.135712658509894,5.102152749057878,5.068192738359024,5.033839266017402,4.999099074240093,4.963979007999111,4.928486014426034 +6.12437432937036,6.131635296512253,6.138380586834161,6.1445187541124335,6.15004901629234,6.154971173275318,6.159284698919381,6.162989514412806,6.166085738348761,6.168573379435025,6.170451907201967,6.171720979163021,6.172381168632416,6.172433435183822,6.171878086162276,6.170715178394319,6.168945106853016,6.166569047466736,6.163588185553098,6.160003116538194,6.155814368135927,6.1510230126973156,6.145630843534211,6.13963928538704,6.133049293387422,6.125861953175871,6.118079065944704,6.109702882382341,6.100735070233393,6.091177008405884,6.081030406247353,6.070297784586933,6.058981713426626,6.047084153256773,6.034606973566701,6.021552574528348,6.007924108210932,5.993724388827343,5.978955734256841,5.963620572310158,5.947721975587776,5.931263089739583,5.914247049206948,5.896677258075055,5.878557211426006,5.859890311542142,5.840679885452207,5.820929508596894,5.800642980584672,5.779824223539852,5.758477183878315,5.736605807438432,5.714214198055713,5.691306576216277,5.66788726108151,5.643960672809093,5.619531334640985,5.594603870880805,5.5691830029603695,5.54327355092311,5.51688043582804,5.490008682104972,5.462663417217396,5.434849866826326,5.406573355151894,5.377839307413849,5.348653252276966,5.3190208222339175,5.288947749467904,5.258439866218881,5.227503106686219,5.19614350878226,5.164367212840793,5.132180458719139,5.099589587820024,5.066601045784184,5.0332213815143945,4.999457245271398,4.965315389924915,4.9308026710991975,4.895926046415911 
+6.054990892309187,6.062171987960313,6.068846373458554,6.074924193054725,6.080404685133461,6.085287654935582,6.089572597861915,6.093259438792377,6.096348309877873,6.098839233254683,6.1007316968994685,6.102025376094574,6.102720855038945,6.102819103851602,6.102320443085624,6.101224942790038,6.0995330111832695,6.097245837491829,6.09436462033158,6.090889968386968,6.086822422625521,6.082163068688218,6.076913713224238,6.071075794286421,6.064650280288112,6.057638270159854,6.050041578430062,6.0418624691511935,6.033102623394493,6.023763433376311,6.013846621773027,6.003354722792797,5.992290319818322,5.980655386682653,5.968451806211791,5.955681991946157,5.942349109366965,5.9284559860773465,5.914004953317837,5.898998452265973,5.88343956892463,5.867331462348183,5.850677280380931,5.833480440525523,5.815744451284798,5.797472728353373,5.778668612160909,5.759335691565451,5.739477779614462,5.719098811878989,5.698202748214971,5.676793547884936,5.654875328163033,5.632452323009044,5.609528865077498,5.586109388020509,5.562198428564138,5.537800624517412,5.512920710837451,5.487563521106237,5.461733989939063,5.435437155348367,5.408678158449223,5.381462238532356,5.35379473339481,5.3256810818514495,5.2971268262540665,5.268137612914367,5.238719187749654,5.208877396578273,5.178618187197128,5.147947611287809,5.116871822929517,5.085397075505845,5.053529723857976,5.0212762271338445,4.9886431477397375,4.955637149082714,4.922264997063426,4.888533560204065,4.854449808888535 +5.97072536846116,5.9778095141845755,5.984397909295513,5.990402642007767,5.995822975687564,6.000658722101675,6.004909402980813,6.008574947716845,6.011655505030548,6.01415111347026,6.016061283558019,6.017385712293438,6.018124997177845,6.018280121217397,6.01785142110741,6.01683898305712,6.0152432314726925,6.01306537183192,6.010306619002058,6.006967597870891,6.003048865603587,5.998551524082504,5.993477396255239,5.987827936442954,5.981604129290257,5.97480708996892,5.967438649304051,5.959501087678191,5.950996102447803,5.941925102092586,5.932289825576764,5.922092823455914,5.91133669546222,5.900023431735021,5.88815493140024,5.875733624335101,5.862762692408144,5.849244979587044,5.835182833445876,5.82057871150319,5.805435716143556,5.7897570228049995,5.773545795720066,5.756805468802841,5.739539566974174,5.721751522336444,5.703444691720914,5.684622680408647,5.665289317886059,5.645448556172887,5.625104371577986,5.604260739827647,5.582921794674066,5.56109178653347,5.538775064510243,5.515976078723253,5.492699382406199,5.468949629891618,5.444731572589064,5.420050060483529,5.394910044593343,5.369316579380836,5.343274822419668,5.316790029350596,5.289867554226157,5.262512852063153,5.234731481402529,5.2065291047397295,5.177911484010843,5.148884480844355,5.119454058736353,5.089626284971744,5.059407328969883,5.0288034595324405,4.997821046815743,4.9664665648226585,4.934746590391058,4.90266780162307,4.870236978730733,4.8374610041542,4.804346862252129 
+5.87189965088213,5.878870170731089,5.8853578925431425,5.89127720201228,5.896627392028621,5.901408282071461,5.905619424988338,5.909260755503341,5.912332441918824,5.914834542178391,5.916766593448491,5.918128318394172,5.918920330236105,5.919143627208375,5.9187985650822,5.9178852491644145,5.916404122990708,5.914356411242104,5.911743347989244,5.908565577267525,5.90482367538311,5.900518763410145,5.895652683554478,5.8902269093601225,5.884242444650615,5.877700423788161,5.870602696853463,5.8629515635232785,5.854748740393961,5.845995655158257,5.836694066021837,5.826846542850568,5.816455704686984,5.8055235609335805,5.794052029972611,5.782043560976666,5.769501355159244,5.7564282758127705,5.742826689814091,5.728699073992946,5.714048550077014,5.6988783128397165,5.683191545879152,5.666991702523084,5.6502823271181795,5.6330668711688965,5.615348710897684,5.597131471033088,5.578419000553711,5.559215271002002,5.539524278227506,5.519350017549549,5.498696642372085,5.477568422672066,5.4559697270908405,5.43390502536292,5.411378890526801,5.388395996834984,5.3649611153423145,5.341079115498211,5.316754967833964,5.291993746592314,5.266800629254767,5.241180890991163,5.215139905065264,5.188683145580017,5.161816190230146,5.134544720811068,5.106874518057092,5.078811461690539,5.0503615330567255,5.021530817212743,4.992325500577573,4.962751869203478,4.932816310243206,4.902525313385524,4.871885470077381,4.840903473509264,4.8095861180684425,4.777940299385426,4.7459730151826065 +5.758894168935594,5.7657348626441225,5.7721077041147435,5.777929730072222,5.783200267472947,5.787919144695346,5.792085950490519,5.795700625735026,5.79876336132398,5.801274237579056,5.8032328224100596,5.804638868095413,5.805493005991263,5.805796251899244,5.805548983599471,5.804751328433328,5.803403752007085,5.801507501155504,5.799063832103168,5.796073410976467,5.792536836165091,5.788455250884064,5.78383051955543,5.77866413789896,5.772957131863448,5.766710657949287,5.75992658844975,5.752607245297722,5.744754367281864,5.736369404255278,5.727454136616115,5.718011156500426,5.708043105218474,5.697552014389252,5.6865398246040195,5.675009007284987,5.6629627859424785,5.650404046147821,5.637335177044723,5.623758677738557,5.609677692253716,5.595095437641841,5.580015119834962,5.564440214567205,5.548374288608422,5.531820815847807,5.514783194873926,5.49726507287415,5.479270321364178,5.460802934475826,5.441866930676434,5.422466327990076,5.402605302631552,5.382288147241095,5.361519253088984,5.3403031126777085,5.318644322143979,5.296547579060809,5.274017677344098,5.251059509005898,5.227678067246341,5.203878449470201,5.179665856583158,5.155045586559458,5.13002303495194,5.104603697981038,5.078793175631326,5.052597172311295,5.026021490587816,4.999072030888611,4.971754794937672,4.944075888163278,4.916041516171258,4.887657984638898,4.858931699999958,4.829869169214455,4.800476999413259,4.770761900214363,4.740730681137094,4.710390251521346,4.67974762315291 
+5.632147882407265,5.638843098575325,5.6450874017450685,5.650800833261113,5.655982758688598,5.660633016491691,5.664751236111606,5.668337365397553,5.671391620848016,5.673914108145064,5.675904430040695,5.6773623723743665,5.678288587052866,5.67868410978368,5.678549343287962,5.677884439877927,5.67668989017048,5.674966966102867,5.672716949002253,5.6699405300281045,5.666638332594919,5.662811525005714,5.658461996852758,5.653591268982554,5.648200392417275,5.642290548745015,5.63586363542644,5.628921999609383,5.621467405234173,5.613501327275154,5.605025571286511,5.596042754644355,5.586555543903205,5.5765659958686475,5.5660760763112025,5.555088281883977,5.543605861401779,5.531631725710832,5.519168289184004,5.506218076168214,5.492784255993451,5.478870071027495,5.464478752515718,5.449613801529413,5.434278810185848,5.4184772777186465,5.402212628077985,5.3854885338088705,5.36830889174196,5.350677721313679,5.332599066322209,5.31407697018098,5.2951156344452155,5.275719376932071,5.255892613994372,5.235639863210809,5.214965745839265,5.193874984430528,5.172372397736031,5.150462902495784,5.128151516503391,5.105443361570072,5.082343662540673,5.0588577413631315,5.034991017782638,5.010749012011187,4.986137347387619,4.9611617507307875,4.935828047341363,4.91014216119498,4.884110117234726,4.857738042692596,4.831032164793681,4.803998812482952,4.776644415924533,4.748975504632167,4.720998707429292,4.692720759465295,4.664148495693209,4.635288850972906,4.606148863996766 +5.492158307785597,5.498693017089648,5.504795746311916,5.510389895047849,5.515474872013893,5.520050526946593,5.524116534763552,5.527672851106773,5.5307197210880235,5.533257278731541,5.535285165730219,5.5368032054318945,5.537812072708055,5.538312825508804,5.538305894427569,5.53779145968305,5.536770039841828,5.535242934890443,5.533211454205308,5.530676316919839,5.5276381744135055,5.524098223022609,5.520058380460662,5.515520195649851,5.510484747633336,5.504953246035824,5.4989276164379515,5.4924102341587675,5.485402891249768,5.477907090769135,5.469924666392096,5.461458263705991,5.452510577489473,5.443083692704282,5.433179603268816,5.422800834050593,5.411950662187994,5.400632026800753,5.388847370438851,5.376599245641546,5.363890850054735,5.350725454413312,5.337106318234242,5.3230369708048615,5.308521032454196,5.293562030674387,5.278163417728279,5.262328894330307,5.246062385285119,5.2293679379037314,5.2122496238603135,5.19471151441087,5.176757838691261,5.158392942020433,5.139621268124312,5.120447361717336,5.1008758707464095,5.080911543810878,5.060559226177168,5.039823861373194,5.018710493544508,4.9972242696845175,4.97537043844411,4.953154346689879,4.930581440415872,4.907657265976774,4.884387471275189,4.860777805217516,4.836834117232695,4.812562359104083,4.787968583498666,4.7630589424487,4.737839689660894,4.71231718344585,4.686497885002174,4.660388355833711,4.633995257672167,4.607325365894264,4.580385557778732,4.553182813227872,4.525724218586121 
+5.339481524759378,5.345841393168591,5.351790208341799,5.357255081800983,5.362235469956517,5.366731235003971,5.370742102123308,5.374268035569406,5.377309312076516,5.379866096992225,5.381938075048824,5.383525111049469,5.38462790524341,5.385247540159218,5.385384477191903,5.385038927399425,5.384211440234383,5.382903346678771,5.381115987101977,5.378850111539115,5.376106402257685,5.372886086559352,5.3691911132143835,5.365023062143986,5.360383043333128,5.355272297360357,5.3496927808322505,5.343646900117015,5.33713647831886,5.330163049571894,5.322728478638652,5.314835442194981,5.3064866661545,5.297684266707698,5.288430269052871,5.278727229337576,5.2685784560624755,5.257986919825799,5.246955094648831,5.235485564616914,5.2235815590861945,5.211246380621954,5.198483320650096,5.185295940484726,5.171687892611534,5.157662736834769,5.143223958214945,5.128375290029124,5.113120688816404,5.097464233459222,5.081410027770421,5.064962176245324,5.048124940757168,5.030902697516009,5.013299920208697,4.995321183695544,4.97697116685845,4.958254647701642,4.939176500133475,4.919741695903719,4.899955306228441,4.879822503115829,4.8593485563430425,4.83853883499024,4.817398809611169,4.795934049810162,4.774150221646382,4.7520530846179785,4.729648501409312,4.706942443406974,4.683940980276647,4.66065027070185,4.637076574607855,4.613226265057943,4.5891058200796015,4.56472181229838,4.5400809097528985,4.515189911894353,4.490055718795841,4.4646853331875525,4.43908586741421 +5.174732160727834,5.180903622708599,5.186686952506651,5.192013327295525,5.196882255722219,5.201293613627981,5.205247181240746,5.208742932248239,5.211781178015419,5.214362118193631,5.2164854846529565,5.218151187598435,5.219359955068341,5.2201128965022185,5.220410507026522,5.220253031474429,5.219641053122497,5.218575936889506,5.217059057080766,5.215091197579824,5.212673074491418,5.209805949032214,5.206491803984639,5.202732253229697,5.198528440653563,5.19388164075236,5.188793844137118,5.183267491230311,5.177304439144937,5.170906256009327,5.164074840614918,5.156812903746361,5.149123205452327,5.141007896004062,5.132469034679701,5.123509211777622,5.114131770091559,5.10433971446106,5.094135553004344,5.083521903928564,5.072502030908182,5.0610792709340116,5.0492569496703625,5.037038662537339,5.0244280961195456,5.011428844438711,4.998044426911211,4.9842786108559105,4.970135386425394,4.955618865908513,4.940733186549181,4.925482486202742,4.9098710595918424,4.893903315828595,4.877583761369388,4.860917003364524,4.843907751940123,4.826560815048261,4.808881098143997,4.790873605589685,4.7725434404782625,4.753895804146515,4.734935993251437,4.715669406910854,4.696101549065094,4.676238023124909,4.6560845263852855,4.635646845245473,4.614930874491814,4.593942626282123,4.572688211945631,4.551173827518696,4.529405776066465,4.507390477754949,4.485134461568926,4.46264435765583,4.439926897009234,4.41698895292432,4.393837506017774,4.370479647599389,4.346922582201125 
+4.998583421333204,5.004553753079981,5.010160868193616,5.0153403632754054,5.020091803751707,5.024415080296104,5.028310032969687,5.031776645712904,5.0348152675286375,5.037426135351596,5.039609032287844,5.041363917890413,5.042691550396216,5.043593068485769,5.044069003946136,5.04411963832204,5.043745591651463,5.042948265728193,5.041729071734515,5.040088830353087,5.0380282944809975,5.035548762202791,5.032652253272952,5.029340418500576,5.025614438636825,5.021475625065321,5.016926005396232,5.011968057142443,5.00660367437104,5.00083446208041,4.994662356003799,4.988090104073763,4.9811205034656885,4.973755741289295,4.965997913574465,4.9578496475397404,4.949314323089743,4.940394981872455,4.9310941685165,4.921414537676887,4.911359389637793,4.900932098023124,4.890136024640401,4.878974800624187,4.867452148059625,4.855571696449078,4.843337000189166,4.830751861346571,4.817820305297701,4.804546479410915,4.790934555163143,4.776988702742216,4.762713249006777,4.7481126383457966,4.733191413768781,4.717954217736098,4.7024057922859095,4.686550977132921,4.6703947142387765,4.653942047537952,4.63719812013891,4.620168171478324,4.602857537679988,4.585271662515136,4.56741609809882,4.5492964993107945,4.53091861805969,4.512288298785471,4.493411501486419,4.474294312196724,4.454942922721675,4.435363617806147,4.4155628031339855,4.395546995928526,4.3753228275121145,4.354897055083993,4.334276557281491,4.313468349293639,4.292479571014497,4.271317491186715,4.249989494379211 +4.811767094367433,4.817524487036783,4.8229455734158835,4.8279707233626485,4.8325995636255366,4.836832000897018,4.840667939855839,4.844107375514246,4.847150697518923,4.849798183064047,4.852049670583908,4.853905172918688,4.85536548090657,4.856431764786802,4.857104595916805,4.857384295465076,4.857271523141103,4.856767720501525,4.855874338485046,4.854592237549114,4.8529222103880425,4.8508655948917605,4.848424450629039,4.845600468368405,4.842394868987421,4.83880900405195,4.834844941338375,4.830505198753248,4.825791710723183,4.820706122534992,4.815250410404472,4.809427363378754,4.803239820054949,4.796690007846433,4.789780062809306,4.782512653150655,4.774891201339377,4.76691879028182,4.7585980038347655,4.7499315358476855,4.740922727759474,4.731574995286219,4.72189173944571,4.711876628281562,4.701533420126968,4.690865781624015,4.679877303306934,4.668571821236725,4.656953395127762,4.645026205547538,4.6327944542559925,4.620262334879984,4.607434194977139,4.594314509505319,4.580907855222002,4.567218902185134,4.553252405339429,4.5390132123191,4.524506289025236,4.509736712799921,4.494709657693185,4.479430380481107,4.46390423028943,4.448136679146487,4.432133317550359,4.415899843587434,4.399442052077555,4.382765829349829,4.365877195578471,4.348782320086393,4.331487489598417,4.313999094116101,4.296323673444677,4.278467862657892,4.260438418625595,4.242242277980542,4.223886547297802,4.205378437530459,4.186725326253788,4.1679347624013126,4.1490144128025985 
+4.615073553147262,4.620607186099383,4.625833418207076,4.630697746468614,4.635199863497272,4.639339693191069,4.643117209630553,4.646532419719543,4.649585756753555,4.652277541164436,4.654607670813166,4.65657621577302,4.658184001890591,4.6594322332912315,4.660321523831306,4.66085223723965,4.661025075834661,4.6608415238773935,4.6603030750075956,4.659410632389722,4.658165031444522,4.656567652807718,4.654620598828365,4.652325603162623,4.649683929683292,4.646696973003498,4.643366843981324,4.63969610383126,4.635686730134282,4.631340411184878,4.626659166401518,4.621645828668325,4.616303280585644,4.610633792467981,4.6046395429683145,4.598323243715056,4.5916883617890285,4.584738023493319,4.577474854500107,4.569901590355975,4.5620216155081765,4.5538383891712755,4.545355353676409,4.536576216625672,4.527504775251491,4.518144735440818,4.508499725324194,4.498573617624606,4.488370510727839,4.477894623482699,4.467150192802592,4.456141440408323,4.444872741196404,4.433348608759897,4.421573662999948,4.409552612485786,4.397290238438538,4.384791414419299,4.3720611469597985,4.359104563046169,4.345926887440951,4.332533421750047,4.318929565430415,4.3051208522217355,4.291112939080794,4.276911598457347,4.262522710996238,4.247952264619485,4.233206392779309,4.218291386936202,4.203213672734997,4.187979810657651,4.172596539614775,4.157070662799951,4.141409109348682,4.125619050427581,4.109707881865738,4.093683040242595,4.077552170079765,4.061323126738987,4.0450038773822 +4.409351768049738,4.414651882098308,4.419675496165401,4.424373588329072,4.428745921610073,4.432792438298741,4.436513186661153,4.439908186313281,4.442977917203623,4.445722745982269,4.448142634042227,4.4502377126432915,4.452008845052895,4.4534572716213905,4.454583651652739,4.455388394397643,4.455872247749685,4.456036741677121,4.45588341553715,4.455413218073822,4.454627030264166,4.453526278422602,4.452113110770691,4.450389306643146,4.44835617531406,4.446015156800134,4.4433684076341775,4.440418534747447,4.437167560970572,4.433617219568999,4.429769574966168,4.42562750512544,4.421193937248788,4.4164711862785895,4.411461475406372,4.406167560363275,4.4005929510890915,4.394740817034349,4.3886138284821,4.382214765315709,4.375547054203625,4.368614195109711,4.361419673391155,4.353967241466378,4.346260741383898,4.33830392204714,4.330100452509801,4.3216542503113935,4.312969463966749,4.304050364841257,4.294901241693295,4.285526367351831,4.2759301742125,4.266117238284262,4.256092246340695,4.245859978667455,4.235425295785705,4.224793165348218,4.213968685073116,4.202957072704576,4.191763655330487,4.180393861105889,4.1688532476891185,4.157147497127453,4.145282399834996,4.133263873047211,4.121097980382404,4.108790946410358,4.096349124308781,4.083778985749831,4.071087155706392,4.058280463112361,4.045365921466254,4.0323505621222555,4.019241534736065,4.006046268059582,3.992772443470734,3.9794276873157783,3.966019839564926,3.9525569498918784,3.9390471557374016 
+4.195509315890846,4.20056728655889,4.205381653836824,4.209909230876403,4.214149855646923,4.218103490014658,4.221770261207977,4.225150202371352,4.228243843084043,4.231051599188464,4.233573499747604,4.235809741190622,4.237761226590985,4.2394292347941915,4.240814473669493,4.241917401282548,4.242738814343544,4.243280291679536,4.24354342178148,4.24352920240255,4.2432385635635095,4.242672980967544,4.241834653130796,4.240725408880841,4.2393466053820035,4.237699730614972,4.235786990875007,4.233611043724874,4.231173959646622,4.228477517689428,4.225523828715234,4.222315818744992,4.218856460630876,4.215148113896549,4.211193045596389,4.206994054020734,4.202554687161442,4.197878152673403,4.19296716398248,4.1878245427369025,4.182453749240374,4.176858311184633,4.17104174880572,4.1650078547446725,4.158760510352025,4.15230349667203,4.1456405078124225,4.13877549703202,4.131712661757427,4.124456327745697,4.1170108356953685,4.109380509110877,4.101569847183438,4.093583499161135,4.085426233074057,4.077102923524627,4.068618550136575,4.05997823959648,4.051187222516625,4.042250834248531,4.033174546930322,4.023964004540644,4.014625057985159,4.005163639467528,3.995585740959431,3.985897500182473,3.976105289072355,3.9662157653270222,3.956235650692701,3.9461716618293967,3.9360306968634307,3.925820005505507,3.915547002538653,3.90521903469355,3.8948435417397334,3.8844282617608497,3.8739811784834077,3.8635100906185857,3.853022958290791,3.842527885428261,3.8320330151794946 +3.9745123897680292,3.979320800555537,3.98392050058044,3.9882744921164592,3.992382692621927,3.9962450847217044,3.9998618793805534,4.003233124085947,4.006359401017506,4.009241178179055,4.011878556444375,4.014271801395984,4.016421858285576,4.018330046697215,4.019997126206358,4.021423607044033,4.022610338020069,4.023558949970466,4.0242710839824625,4.024747790372997,4.024990051852837,4.024999397372705,4.02477808036433,4.024327983200556,4.023650513613273,4.022747210334252,4.021620333891683,4.020272598011354,4.0187061236468224,4.0169227367183025,4.0149245964127065,4.012714680707124,4.010296010149687,4.007670989351305,4.004841929024859,4.001811669341652,3.998583793077701,3.995161542355918,3.9915476728478865,3.9877450460412045,3.9837571486896897,3.979587524836659,3.9752397226430727,3.9707175713957663,3.9660249872511533,3.961165773718116,3.9561436356519937,3.9509625538300472,3.9456267731292254,3.9401406748354404,3.9345086511462863,3.9287350757656716,3.9228245225721885,3.916781721243112,3.9106115302685662,3.904318935189961,3.897909068125004,3.8913872705169563,3.8847589350088683,3.878029525906651,3.8712046846018486,3.8642903382479474,3.857292737009952,3.850218132433532,3.8430727517727923,3.8358629889233242,3.8285955997595753,3.8212778099810705,3.8139167908260565,3.80651949500524,3.799093083266646,3.791645276227675,3.784183898083095,3.7767165865569714,3.769251022178056,3.7617951550190534,3.754357112713566,3.7469446824446884,3.7395656806374484,3.732227914330363,3.724939149753112 
+3.747385811131839,3.751938526798326,3.7563194206699255,3.7604980382469213,3.764474381022253,3.7682484535730927,3.771820555395981,3.775190749149541,3.7783596726727557,3.7813278491137163,3.7840954551661468,3.786662829427267,3.7890309617822586,3.7912012150287855,3.7931744029184538,3.794951089908275,3.7965321791083695,3.7979193558979962,3.799114315836038,3.800118163160346,3.8009319343259973,3.8015572123294255,3.8019963048814422,3.8022511479074197,3.8023232020775435,3.8022140588735858,3.801926031583658,3.8014618856163755,3.800823794429896,3.8000136372804874,3.7990336257605946,3.797886787091839,3.796576190551424,3.795104295565216,3.7934734694940135,3.7916866050186946,3.7897473313360233,3.7876589445884745,3.785424264291203,3.783046216970073,3.7805283468537785,3.777874251519416,3.775087550372449,3.772172159635587,3.7691320879802794,3.765971227355952,3.7626933776729987,3.7593026311800335,3.7558033474541785,3.7522000327000664,3.7484972228430777,3.7446994773144446,3.7408115786472997,3.7368384133636767,3.732784983953195,3.728656462098342,3.724458259936943,3.720196041035552,3.7158754304839725,3.7115020750126577,3.7070818247324846,3.702620914844235,3.6981259625140774,3.6936034856731967,3.6890598969811075,3.684501757435123,3.6799360344452317,3.6753702377044175,3.6708116818276197,3.666267253373359,3.66174399468149,3.657249539966029,3.6527914412570306,3.648377050776645,3.6440136742620512,3.639708608849574,3.635469088036495,3.631301863702694,3.6272134712307484,3.623210165620845,3.6192980378388677 +3.5152130320067343,3.519505271847032,3.5236645754880778,3.5276673858127654,3.531513792894936,3.535203824459548,3.5387378733329995,3.542116018173307,3.5453389555535826,3.5484072667412665,3.5513212082115984,3.5540811953892724,3.5566882652449507,3.559143826277766,3.5614487486782047,3.563603652832733,3.565609498030386,3.567468026440374,3.5691809894309783,3.570749546091797,3.5721747872319027,3.5734583499533032,3.574602594057131,3.575609508293675,3.5764806094848094,3.5772175446729393,3.577822677569614,3.578298819230875,3.5786481990681986,3.5788727594251335,3.5789747711401483,3.5789573093704314,3.578823497223702,3.57857586391044,3.578216852540691,3.577749426588639,3.5771772860581468,3.5765038137892087,3.575731923078687,3.5748646408143308,3.5739056185413496,3.572858569241299,3.571727246466441,3.57051572027021,3.569228168106061,3.567868661800236,3.566441214210063,3.5649501385253086,3.563399989677735,3.5617954778959167,3.5601413899189915,3.5584426283538195,3.5567043277586623,3.5549316040957883,3.5531296440955864,3.551303860542537,3.5494600416847697,3.547604216120039,3.5457422585350766,3.5438800046405534,3.5420234821976853,3.5401791342620017,3.538353721234019,3.5365538039959,3.5347857883239535,3.533056146047784,3.531371637705588,3.529739420185346,3.528166319830948,3.526658593972986,3.5252224824508875,3.523864538725735,3.522590894533273,3.5214076338384452,3.5203206579986626,3.5193352515582386,3.5184561770142335,3.517687619471652,3.517033180789908,3.5164957897049627,3.5160780402326868 
+3.279136136466175,3.2831645475835316,3.2871009049931157,3.290928903223818,3.2946487255998065,3.298260424519774,3.3017644915736475,3.3051610230361117,3.308450779335206,3.3116344050339226,3.3147122418061317,3.317684787864132,3.3205531346708157,3.3233187481459057,3.3259825598695434,3.328545246087036,3.33100782631229,3.3333721150756976,3.3356399309508973,3.3378124803124325,3.339890894791148,3.3418768616129295,3.343772795109782,3.34558072601192,3.347302210308715,3.348938930289971,3.350493274366731,3.3519680483078966,3.353365506659265,3.3546876424035963,3.355936752778652,3.357115870780215,3.358228077151124,3.3592759457989123,3.360261988148419,3.3611891778535172,3.362061143594868,3.3628812757995004,3.3636525930599688,3.364378234513068,3.3650618869678146,3.365707292149054,3.3663183215607257,3.3668992186147157,3.367454366999323,3.3679880586938458,3.3685046102573497,3.369008664422105,3.3695050284605803,3.3699986855822104,3.370494825870469,3.3709990300295325,3.3715171476224106,3.3720546246938454,3.3726168360278783,3.3732095645817237,3.373839400045321,3.374513161379056,3.37523720091255,3.3760176872113785,3.3768609798905453,3.377773975170897,3.3787637551684537,3.3798369853510546,3.3810001023495877,3.3822594454426884,3.3836213809311193,3.3850923282389873,3.3866781728082476,3.3883840733068866,3.3902148352784067,3.392174914658493,3.3942676716887665,3.3964969657756683,3.3988663014650604,3.4013772631069865,3.404030058409561,3.40682421095576,3.409758120035774,3.4128289399126204,3.4160332798464785 +3.0403558807569473,3.04411861139178,3.0478321679523463,3.0514878508157373,3.0550859410089233,3.058626516553103,3.06211017212194,3.0655370218545404,3.0689078915576813,3.072223490460909,3.075484248447015,3.0786907488697035,3.0818441364745617,3.0849459301788587,3.0879971238555646,3.090998453748306,3.09395100085456,3.0968566449505763,3.09971726707869,3.1025341309336807,3.1053084235326085,3.108041888657884,3.1107369941308263,3.113395825199051,3.1160200003909044,3.1186112633427507,3.121172051596441,3.1237052078589342,3.126213049984024,3.1286976528485093,3.131161385811411,3.1336073265374043,3.1360386169858687,3.138457926249161,3.1408678739536686,3.1432715342349926,3.1456726440618983,3.148074730693188,3.150480951374715,3.152894598293173,3.155319542045315,3.1577597413167213,3.1602192886857967,3.1627026563699525,3.1652144824845756,3.167759358449832,3.1703419818826024,3.1729673415528166,3.175640488284852,3.178366636091139,3.181151290282373,3.1840005037709846,3.1869205390447553,3.1899170085887856,3.1929953337501984,3.196161379054716,3.19942193711199,3.202783834161491,3.20625331837808,3.2098363935568752,3.2135391064852237,3.2173677887257157,3.221328504456896,3.225426864240049,3.2296683244100874,3.2340579886364713,3.238600391826243,3.2432993389703606,3.248158052512583,3.253179168611128,3.2583644963923737,3.263714382307017,3.269227434618524,3.2749033136501926,3.280741096739872,3.2867366383536862,3.2928835304456046,3.2991747380157244,3.3056015535315075,3.312153462424245,3.31881915612367 
+2.800131667247713,2.803628440059961,2.807120915789595,2.810608354872548,2.814091140428484,2.8175693768880583,2.8210437660149186,2.824514439690899,2.827982289587907,2.8314480899739127,2.834912361489512,2.838375774201633,2.8418395225975983,2.845305169691931,2.8487737723614517,2.8522461337284057,2.8557233981905608,2.8592074984324616,2.8627003709761882,2.866203354808382,2.8697177182873133,2.8732452752115814,2.87678855158135,2.880349712270696,2.883930475570339,2.8875326889322994,2.891158886653767,2.894812031446834,2.898494566774037,2.9022086909190303,2.9059569155171596,2.9097425142602935,2.9135688607582377,2.9174387858836606,2.921355060232779,2.92532098751485,2.9293406841429044,2.9334179882123133,2.9375562113953424,2.9417588194738604,2.9460300487055817,2.950374307191753,2.9547959936304387,2.9592998054157764,2.963890605195475,2.968573286006188,2.973352889039999,2.9782346160800226,2.983223620763894,2.988325149675845,2.993544707235085,2.998888231333161,3.004361659743351,3.0099703611419075,3.015719470167772,3.021614323686732,3.0276607172367456,3.033864031360295,3.0402293299737373,3.0467615237957855,3.0534652085872978,3.0603444556967174,3.0674022119299935,3.0746412339942286,3.0820644074275028,3.0896739244913043,3.097470434296623,3.1054525847803687,3.113618638198636,3.121966817063589,3.1304939451455405,3.139193877622717,3.1480583133924602,3.1570803579009206,3.166252255962682,3.175562107477518,3.184995051539285,3.1945358785225912,3.2041672681807283,3.213869670415069,3.2236224722342053 +2.559781547742295,2.5630137334451937,2.5662885005199216,2.5696134244825304,2.5729890038863457,2.5764153702716937,2.579893349171019,2.5834230863070875,2.587005546146394,2.5906415671511054,2.5943317568782387,2.598076864673215,2.6018781170032192,2.6057370730949243,2.6096548235202217,2.613632223127842,2.6176704400182493,2.621771347669926,2.625936816421549,2.6301682575868863,2.634467045868412,2.6388349966869935,2.643274487662063,2.647787668003212,2.652376409474334,2.6570427099811993,2.6617890999560245,2.6666185580162387,2.6715336682543684,2.6765368105359166,2.681630701600027,2.6868188981570076,2.6921051798100755,2.6974926155802335,2.702984188447122,2.7085836278733173,2.714295930265277,2.7201256036434875,2.7260761014998476,2.7321510867389813,2.738355624821865,2.7446952169421346,2.7511748886166507,2.7577997084655825,2.7645749046105106,2.7715059691764967,2.778598621554838,2.785858402523328,2.7932906205492114,2.8009005165033725,2.8086934324360264,2.8166746491905625,2.824848945292463,2.8332212065185876,2.841796203264065,2.8505781951908333,2.859570365650002,2.868774496084895,2.878193123191773,2.8878291082330843,2.897684232818645,2.907757775043553,2.918046186781618,2.9285467023123877,2.9392574803831795,2.9501755033176074,2.9612944375142085,2.9726035614653843,2.9840925609353075,2.995752707069819,3.007573077891685,3.0195372504147775,3.0316267539955333,3.0438244766448244,3.0561120494689904,3.068466857020519,3.0808629528799347,3.093274948864103,3.10567502115693,3.1180328497938725,3.1303167051812646 
+2.320682264581978,2.3236529545223377,2.326715101380448,2.3298849499509564,2.333163117359178,2.3365497633315515,2.340045839794187,2.3436515074911886,2.3473678047733695,2.3511956370356772,2.355135704545098,2.359188841794262,2.363356310412735,2.367639670825615,2.372040059427661,2.3765583991595602,2.381195898907235,2.385954392602436,2.390835714424348,2.3958413760474295,2.40097288901125,2.4062321124155255,2.411621348940427,2.4171427947958133,2.4227985018373106,2.428590653309058,2.4345218677783333,2.4405952808616256,2.446813675072047,2.453179615416945,2.4596960651606787,2.4663670113964042,2.4731967610391368,2.480188617690613,2.4873457342748777,2.494672284302602,2.5021742145749677,2.5098566632791615,2.5177231069838695,2.525777258222346,2.534024878366951,2.542472403779364,2.551125197935632,2.5599882937158873,2.5690667988760336,2.5783662746597837,2.587892438163859,2.597650397059663,2.6076448987047374,2.61788038056257,2.6283609875105607,2.6390898485811363,2.6500689407933886,2.661301512889677,2.6727909419827336,2.6845390176043544,2.696544160982253,2.7088022465398227,2.721311368036889,2.7340705472964424,2.7470767972487473,2.7603222159857927,2.773794197405876,2.787482137529233,2.8013772502499203,2.8154690982970347,2.8297420817676096,2.8441737905259723,2.8587431974315822,2.873432641799282,2.8882216056760495,2.903081822029139,2.9179835666375076,2.932897895254838,2.947794247391923,2.9626387038700543,2.9773942600856156,2.9920254510606963,3.00649442938213,3.02076100313271,3.0347833070668826 +2.08426923384589,2.0869833132661286,2.0898397173329823,2.0928637134522945,2.096056029262447,2.099416854488511,2.1029472551019075,2.106647414456886,2.110518441358951,2.1145613170218014,2.1187768524557797,2.1231659901572133,2.1277300581805325,2.1324706907511275,2.1373891313877786,2.1424864217348834,2.14776389455642,2.153223523800749,2.1588673170448587,2.164696941991027,2.1707140715276614,2.1769207713980965,2.18331966223227,2.189913218103336,2.19670364717887,2.2036933169667714,2.2108852209602214,2.2182830276952643,2.2258897889801066,2.233708170433016,2.2417413570497833,2.2499938959986934,2.2584705817300073,2.267174811315033,2.27610971142575,2.2852796304014045,2.294690876215521,2.304348601207275,2.314256030723622,2.324416542870696,2.3348356426774077,2.3455194609307366,2.3564726114363133,2.367698979474215,2.379202281841733,2.390986586384681,2.403055690701955,2.4154124258040617,2.4280593350823576,2.4409983536109046,2.4542304029818314,2.4677539851418784,2.4815658699669014,2.495665487406412,2.5100526822833427,2.5247244961852626,2.539672133444565,2.5548834718498625,2.5703498300173977,2.586063816833856,2.6020152871948814,2.6181873870847525,2.6345574057337937,2.6511054488599988,2.6678138735453705,2.684663112016197,2.701627464518704,2.7186733601203543,2.7357691999390212,2.7528874781108894,2.7699977243444467,2.7870613732758023,2.8040393165904844,2.820892074564161,2.8375784254917433,2.8540556715045002,2.8702791310529903,2.8862060009875545,2.9017917934515247,2.916990499391918,2.9317544515434673 
+1.852036528011045,1.8545007468100874,1.8571601170062249,1.8600492726867466,1.863168962990435,1.8665193949352077,1.8701016001148085,1.8739157852075121,1.877962982172934,1.8822441543383945,1.886760192413981,1.891512108246857,1.8965012060155817,1.9017291660317153,1.9071973661172628,1.9129070104983643,1.9188596396815463,1.9250575997510175,1.9315034071055162,1.9381989093203114,1.945145903378918,1.9523469313123578,1.959805763752597,1.9675256319344085,1.9755087518334955,1.983757598689736,1.9922762205295244,2.00106981229178,2.0101418939550397,2.0194950130501246,2.0291325758805447,2.039060264281499,2.0492836160114143,2.059805947133836,2.070630030368362,2.0817600993800904,2.0932021412919126,2.1049604794697108,2.1170377964356133,2.1294366605083135,2.14216088826531,2.1552143207665075,2.168599182997956,2.182316642147041,2.196367185590534,2.210750946873253,2.2254666663563913,2.240512027083308,2.2558849818270095,2.2715825400308307,2.287599395275823,2.3039256652303366,2.320549417655555,2.3374630539308434,2.3546597362003734,2.37212850509773,2.3898500981035737,2.407802035321226,2.425965769093252,2.444323808918883,2.462855650874521,2.4815338540530116,2.5003255661348818,2.5192005383673477,2.538130177047232,2.5570840778084674,2.5760265681911516,2.59491591284336,2.613711948363634,2.6323768448819336,2.6508707454538154,2.6691491908808427,2.6871683077973647,2.704881904927629,2.722242356937338,2.7392036526327717,2.7557201711922734,2.7717469500371053,2.787239083859574,2.802152042710201,2.8164401387197224 +1.625536898634125,1.6277599471729256,1.630232927702111,1.6330001835511516,1.6360623836472565,1.6394197369372694,1.6430730703242957,1.6470226122796232,1.651269146244791,1.65581350430537,1.6606566131136193,1.665799499491583,1.6712433273938991,1.6769897686069184,1.6830403384894344,1.6893964228313798,1.6960598276307108,1.7030334797254179,1.7103207121859796,1.7179235217055797,1.7258437268443452,1.7340845619426264,1.7426517365530745,1.7515496450636903,1.7607802633151308,1.7703459896968101,1.7802524982981456,1.7905073467950923,1.8011145743119117,1.812076259654785,1.8233958584606333,1.8350804815413047,1.8471363371629805,1.8595662871175707,1.8723722462048884,1.8855577354881956,1.899127191281113,1.913082832567329,1.9274263170558577,1.9421587094198847,1.9572801991886126,1.9727897484609769,1.9886851183464718,2.004962817166155,2.021617889652834,2.0386436494114677,2.0560301881366057,2.073766801303083,2.091844112989892,2.1102514690950076,2.128974080786331,2.147989873316126,2.167274807091059,2.1868108344896404,2.206580993428272,2.226563017031089,2.2467244086519513,2.267030729671087,2.2874508392123984,2.307953641663957,2.3285054687175233,2.3490676526331953,2.369598622794146,2.3900578307681033,2.410404498672952,2.430596640251802,2.4505901584155856,2.4703401941423073,2.4898017974975115,2.5089276039393327,2.5276703674598453,2.5459861738128025,2.5638328765172522,2.58116340211049,2.597930073671492,2.6140909035267104,2.629607894878088,2.6444406567914784,2.658551656683457,2.671906703213879,2.684471508091469 
+1.4063818058080395,1.4083744001180516,1.4106737886254188,1.4133344026158368,1.416357041790279,1.4197419601443386,1.4234901385211574,1.4276018580921495,1.4320780525984906,1.4369197207647149,1.4421279784316898,1.4477040630850315,1.453649388096252,1.4599660216038943,1.4666557554930877,1.4737201591096452,1.4811613499157075,1.4889829338902913,1.4971889279907158,1.5057814218141592,1.514762177947815,1.5241348029971264,1.533905968555287,1.5440804294976513,1.5546597938000235,1.5656460751458905,1.577045163392035,1.5888646486079363,1.6011079776791743,1.6137763885344956,1.626872328345669,1.6404014253532224,1.6543675798894015,1.668771995521713,1.68361491587005,1.6988970127918057,1.7146172673291749,1.7307730384914972,1.7473635653221389,1.7643869192383752,1.7818367759878986,1.7997041446180613,1.817979926413367,1.8366536562717402,1.8557127203183368,1.8751415409254197,1.8949200412925937,1.9150276253913656,1.9354455397045653,1.956153501256722,1.97712605042755,1.9983293494459888,2.019728037557101,2.041292593534884,2.062994388727274,2.0847994978605056,2.1066644093237437,2.128545498202403,2.150400527127768,2.172185934299234,2.1938566525035634,2.2153661199506467,2.2366684931618765,2.257716653269873,2.2784609315802653,2.2988513500385497,2.31883999602244,2.3383844256052106,2.3574401175002087,2.3759553847233117,2.3938810193318356,2.4111792258839384,2.427814928127499,2.4437462770096694,2.458931879034741,2.4733393633143455,2.4869434804835318,2.499713887047191,2.5116255752061227,2.522659433054588,2.532797498702223 +1.1962413596768555,1.1980163068386782,1.2001570314769452,1.2027284419572781,1.2057317751469843,1.2091673869004738,1.21303692617295,1.2173407562095762,1.2220805113115756,1.2272577479407831,1.2328739509308309,1.2389307967535315,1.245430422763479,1.25237571544976,1.2597688198487116,1.2676113945536784,1.27590577696587,1.2846560647601752,1.2938663965221284,1.30353875219126,1.3136746544508706,1.3242773268518684,1.335352343657045,1.3469032593087367,1.358931105021774,1.371437004468638,1.3844245362039713,1.3978973347945434,1.4118564604399306,1.4263017977113042,1.4412331949289536,1.4566503091735794,1.4725503722066655,1.4889312528783996,1.5057904761028356,1.5231230054346792,1.5409173881483216,1.5591628049215471,1.5778542236588486,1.5969848651415597,1.6165387946398355,1.6364960942983378,1.6568385184309267,1.6775464088583694,1.6985975918075584,1.7199663039128166,1.7416222264803822,1.7635346500040427,1.7856741693540295,1.8080097467142868,1.830505577343567,1.85311909741865,1.8758069884208761,1.8985289735420834,1.9212447428441186,1.9439104037477701,1.96647664065219,1.9888960465617214,2.0111194344821772,2.0330948325119365,2.0547703854288026,2.0760973896000365,2.09703198150374,2.117526424262572,2.137528194593236,2.156985527521899,2.1758531213183834,2.1940968032575467,2.2116785467761053,2.228550024137025,2.2446673791406377,2.260003983578436,2.2745364414967413,2.2882342991792375,2.301068659393407,2.313021206509602,2.3240824265692606,2.334236507201595,2.343474340039233,2.3517940189086,2.3591959828322704 
+0.9968442993850288,0.9984093648691816,1.0003922529526046,1.0028671808358478,1.0058371121692051,1.009302780147182,1.013268744130551,1.0177356792533838,1.0227084875962011,1.0281912641854418,1.0341869500185923,1.0406990560234224,1.047733140712001,1.0552958737559137,1.0633911614874378,1.0720213876848457,1.0811901395824899,1.0909036580728302,1.101166794420958,1.1119823419097932,1.1233524746577412,1.1352799271603735,1.147766190443581,1.1608115886837966,1.1744178117089836,1.1885856217372857,1.2033118942437158,1.2185882315343626,1.2344107230189596,1.2507785817796533,1.2676870193689886,1.2851202994059934,1.303059816844883,1.3214978679305887,1.340428651419003,1.3598361075388496,1.3796874678669582,1.3999570904949223,1.420632830230077,1.4417001593899519,1.4631273308522925,1.4848775261723344,1.5069190162525432,1.5292186243426742,1.5517409774620856,1.5744479425138702,1.5972997120800905,1.620255623521878,1.643272964522358,1.6663074576501262,1.6893138785814936,1.7122482277434017,1.7350669666318892,1.7577204808996778,1.7801563299502057,1.8023239527997945,1.824179333817984,1.84568219884966,1.8667847548624128,1.887434955039071,1.9075833469982588,1.927189873179721,1.9462236994679478,1.9646472868454559,1.9824166357884128,1.9994896834940425,2.015834804142335,2.0314343973356093,2.046266203527483,2.060299567141539,2.0735089532017996,2.0858848373381713,2.0974206270330886,2.1081059506860593,2.1179329512736293,2.1269018936238866,2.1350199390298004,2.1422910999145452,2.1487246185314817,2.1543351018171686,2.159140382231858 +0.8099781102479716,0.8113554611822106,0.8132109631567027,0.8156295753284533,0.8186153789440169,0.8221693188478201,0.826297747059814,0.8310014422631614,0.8362871512402016,0.842160279262683,0.8486244111288324,0.8556838427486272,0.8633456562389534,0.8716178154249455,0.8805043781553631,0.8900072981750118,0.9001297026620094,0.910876963487329,0.9222517212356126,0.9342557797396468,0.9468903529516005,0.9601551884242583,0.9740441574829626,0.9885516152584493,1.0036779635021615,1.0194215745860837,1.0357697438067799,1.052699082626127,1.070198432350727,1.0882643101551057,1.106885228407475,1.1260280026777554,1.1456566864162916,1.165756211229819,1.1863156634086718,1.2073067995733415,1.2286758685730836,1.2503824540901025,1.2724059287590261,1.294722989567206,1.3172871269568835,1.3400466056216878,1.3629575705581658,1.385974748099102,1.4090514601490045,1.4321399653616875,1.4551946473107296,1.4781684633876346,1.501008716838601,1.5236614807592053,1.546076503290468,1.5682137182270768,1.5900347013249518,1.6114857395871784,1.6325076570542305,1.6530489742642411,1.6730770244601332,1.6925641225716679,1.7114701091993503,1.7297498020304793,1.7473628597045683,1.7642834041362843,1.7804978017096826,1.7959839447849044,1.8107127471097415,1.8246580020917766,1.8378062568950857,1.8501583663707726,1.8617106817984683,1.8724553075917036,1.8823891552802963,1.8915203097328794,1.8998590006352827,1.9074161546947543,1.9142058162427626,1.9202459066625397,1.9255577792407768,1.930164092779577,1.9340900668880767,1.9373631881198135,1.9400145635545694 
+0.6374888876374863,0.638705988734242,0.6404734575917082,0.6428890244551706,0.6459564313035577,0.6496764730192756,0.6540546833790045,0.6590915445452735,0.6647924602714261,0.6711615026700001,0.6782012525846329,0.6859146679814268,0.6943064405296399,0.703380957616204,0.7131395183251912,0.7235817364963086,0.7347074058859392,0.7465160311729179,0.7590036664243834,0.7721681923785325,0.7860072207889339,0.8005140790651046,0.8156714579255945,0.8314648838902242,0.8478902264086987,0.8649404450093628,0.8825913282122881,0.9008042087932723,0.9195587935190332,0.9388458457277393,0.9586453635754292,0.9789084298260842,0.9995841750928841,1.0206488646551375,1.0420844086666436,1.0638514947612414,1.0858813971810601,1.108122487457891,1.1305452683505732,1.1531178151672272,1.1757837149865076,1.1984824705478523,1.221162515991208,1.2437709718101422,1.2662547209802484,1.2885618892403692,1.3106464997139693,1.3324606734296776,1.353948212927054,1.375052323209751,1.3957240656162984,1.4159320372463136,1.435647342181313,1.454819491299488,1.4733910179847078,1.491316648698243,1.5085787850622299,1.525165146821622,1.541048517272771,1.5561969832661349,1.570584885333123,1.5842033753607019,1.5970565378621921,1.6091400085357088,1.620443461074636,1.6309599359920628,1.6406950916000183,1.649666335423221,1.6578877818190259,1.6653745866391996,1.6721455157261658,1.6782233322140025,1.6836318378675328,1.6884000819410825,1.6925603257933728,1.6961437142976883,1.699180698282841,1.7017074750568366,1.7037596583032406,1.705371605951402,1.7065810749161772 +0.48128135608065803,0.48236709783738796,0.48408585638535906,0.48654987533155447,0.4897605167184531,0.4937179336678449,0.4984232158110193,0.5038760309910748,0.5100760765136293,0.5170225218581489,0.5247147639368235,0.5331516187954182,0.5423301856323481,0.5522449332932947,0.5628907383001058,0.574262572553766,0.5863534036347453,0.5991508304955067,0.6126393297013825,0.6268093585990937,0.6416517655715688,0.657149759183427,0.6732719832751395,0.6899927398402005,0.7072994599775863,0.725176275402853,0.7435869298694013,0.7624800323332737,0.7818250887872045,0.8016036398827336,0.821786022401869,0.8423129126062088,0.8631245845495612,0.8841881692417088,0.9054765317423124,0.9269426261514935,0.9485135720475026,0.9701331495646961,0.9917637842644277,1.0133663752006485,1.0348826092925831,1.0562526728682906,1.0774241739721506,1.0983435934981634,1.1189585590728528,1.1392204944821283,1.159089527448631,1.1785236908189405,1.1974720048011283,1.2158838028013805,1.2337188501786367,1.2509578749817434,1.2675844307829145,1.2835595916152527,1.2988378358655488,1.3133875576989813,1.3272071505064988,1.3402995844860275,1.3526542225091926,1.3642573700119072,1.3751016929879314,1.3851951834562113,1.3945565160683087,1.4031981767563924,1.411129372552107,1.4183625550075827,1.424919789865314,1.4308296979291537,1.4361197970295083,1.4408238054533302,1.444977177228297,1.4486114617089325,1.4517580288266492,1.4544569980830522,1.456751207618339,1.4586777649481815,1.4602692753768238,1.4615672173354342,1.4626096414506324,1.463430277103035,1.4640653741926815 
+0.34331885526800204,0.3442977127890947,0.34599365639140167,0.3485335846137484,0.3519143380749086,0.3561349236043337,0.3611882140673699,0.3670725544787913,0.37377745302373366,0.3812935487877518,0.38961487227582725,0.39873329711214267,0.4086331162966869,0.41929263516116544,0.430696809857925,0.4428338668576023,0.4556867266048231,0.4692255432951431,0.4834189183959162,0.4982467332415833,0.5136902243223405,0.5297195268440185,0.5462879690427596,0.5633574541909112,0.5809034483855922,0.5988983947467246,0.6172952288394133,0.6360349816200715,0.6550771974081794,0.6743912989170626,0.6939379874234965,0.7136556275345626,0.7334843445322651,0.753382874296745,0.7733140341023204,0.7932281731127894,0.8130618735174436,0.8327628765250381,0.8522876161192313,0.8715925264400537,0.890627120105582,0.9093432656376474,0.9276959105603702,0.9456391374888322,0.9631295688135881,0.9801299942521247,0.9966129997253318,1.0125492954650366,1.0279024641517571,1.04263750432762,1.0567299801546204,1.0701744236090582,1.0829678368301838,1.0950906378701317,1.1065192927777072,1.1172422472445944,1.1272719503734494,1.1366236315627107,1.1453045431496374,1.1533217746056401,1.1606873561017919,1.16742296967543,1.173555759739834,1.1791111584580456,1.184115457976473,1.1885974178124183,1.1925896804555225,1.1961243062432818,1.1992351502534864,1.2019660923593671,1.2043605153950128,1.2064509540977175,1.2082686540175527,1.209855160134284,1.2112536799427287,1.2124984799077119,1.2136167181255706,1.2146452349006183,1.2156155865038127,1.2165531612890392,1.217484412405602 +0.22562332606158914,0.22650754850181887,0.22817528329030454,0.2307648799435733,0.2342671174517803,0.23867950825913448,0.24398404753214442,0.25017743827537986,0.25723592152907176,0.265139166485879,0.2738744085434851,0.283424758527693,0.2937583018119003,0.304833259603169,0.31662262789523354,0.3291068016954945,0.3422569434254506,0.3560228571634644,0.37035544481183763,0.38522247120652103,0.40059417736073666,0.4164267643612764,0.43265966040133536,0.4492433492642278,0.46613963152341725,0.48330824478861045,0.5006946613978291,0.5182393956064741,0.5358941154345767,0.5536151504561688,0.5713553551943596,0.5890601197319587,0.6066790334260931,0.6241647865154156,0.6414708830931931,0.658551187827441,0.6753649944704792,0.6918726915340373,0.7080281260966385,0.7237870816651789,0.7391161670955414,0.7539888901689081,0.7683751943159473,0.7822444743745116,0.7955698171235778,0.8083322038332807,0.8205214172411079,0.8321260194123481,0.8431318366357845,0.8535272359383967,0.8633085853624938,0.8724836574900569,0.8810616943598003,0.8890475970300423,0.8964469594208037,0.9032717910350627,0.9095441412415024,0.915286266479491,0.920521723417377,0.9252776300432042,0.9295832494464444,0.9334683608574483,0.9369607643169013,0.94009259515314,0.9429017945610488,0.9454274436676992,0.9477049168630984,0.9497611621813002,0.9516280298705495,0.953348969976874,0.9549647593346247,0.9565008376897445,0.9579805602287504,0.9594367128973437,0.960902285855641,0.9624002464729101,0.9639456627557474,0.9655613511009209,0.9672644311566765,0.9690657004139269,0.9709752171072382 
+0.1302752965031863,0.13105512713271866,0.1326356436899898,0.13515792139363075,0.13860665989766013,0.1429779064973207,0.14824287794598362,0.15439673843840165,0.16140277986464194,0.16923007560264466,0.17785937384489534,0.1872654511393343,0.1974009678451981,0.20820535061814108,0.2196407180715292,0.23168061230486148,0.24428582227575596,0.2573882587211635,0.27092402358691225,0.2848504514473651,0.29912834197855753,0.3137028845845423,0.3285049778397007,0.34347792403925725,0.358571836081865,0.3737362062266403,0.38891477859645496,0.40405542832657604,0.4191064755287755,0.43401380987065,0.4487266652955278,0.46320582676677874,0.47741839882342313,0.49131541909677157,0.5048446209039876,0.5179693217852647,0.5306800908669264,0.5429563147999147,0.5547539171454093,0.56603264790813,0.5767820929325923,0.587002863154566,0.596684654161487,0.6058169409776731,0.6143935207229246,0.6224167099111102,0.6298947326140519,0.6368355562361282,0.6432502961643646,0.6491534855025506,0.6545629703452419,0.6594966133637068,0.6639723379624798,0.6680194335644225,0.6716734247747627,0.6749683538331105,0.6779296389866323,0.6805801494128636,0.6829547846856934,0.6850956695645671,0.6870435461427368,0.6888292103649404,0.6904735540273542,0.6920079830043551,0.6934742795118496,0.6949137841472739,0.6963564548799593,0.6978171075623785,0.6993180259891273,0.7008920223580443,0.7025676122360084,0.7043569867004392,0.7062699364078354,0.7083225853970965,0.7105297276420725,0.7128974110714554,0.7154249743596677,0.7181152607153216,0.7209665266439099,0.7239722628393798,0.727123475765954 +0.059413867822393926,0.06004579471334232,0.061399677226560125,0.06360246291240347,0.06663541720795811,0.0704938014337368,0.07514295209005473,0.08057730621789319,0.08675337941036161,0.0936348409678003,0.10119920576024899,0.10941715437212288,0.11823380828008183,0.1275798437574746,0.13741274445336674,0.14770351804514537,0.1584089925016824,0.16945432689545323,0.1807711690521818,0.1923133009631807,0.20403836611557163,0.21588927718343845,0.22779918281687456,0.23971297957023804,0.25157700663800914,0.26333842489247383,0.2749476048254873,0.2863669533981733,0.29754906677299486,0.30843774669294477,0.3189860293447667,0.32917460249815916,0.3389913780920393,0.3483926408596673,0.3573279735289408,0.3657743522774642,0.3737520322939824,0.3812602734764932,0.3882628511855039,0.39472846469859735,0.40066847684748524,0.4061072637877941,0.41105313724394144,0.41551446031431016,0.4195037233953423,0.42304103985854824,0.42614857081795193,0.42884917043860615,0.4311739991913654,0.43315787697034447,0.4348337824542287,0.4362236705372069,0.4373482390007599,0.43825273731949344,0.43899292576145493,0.43961575480510856,0.44014315108201346,0.44059252160668955,0.4410013766456416,0.44141660623986617,0.44188071467079354,0.44241833902127553,0.4430387386461263,0.44376559497340995,0.4446353492412979,0.44568269486337997,0.44692603016904403,0.44836551475165154,0.4500097666193112,0.45187456212604366,0.45397087194917957,0.4562961947279679,0.4588458895369851,0.4616173873140614,0.4646057011147839,0.46780027239019073,0.47118654713711927,0.47474849332041325,0.478467683761444,0.4823235876512297,0.4862921191514587 
+0.015236700444775887,0.015629737779308973,0.016505908665018738,0.01795001376934837,0.019946551701977424,0.02249165124808122,0.025556894470017282,0.029137850458229832,0.03319921252831307,0.03771163814732053,0.04265704465665909,0.048011917805517196,0.05373229705466432,0.05976311006218723,0.06607101715849402,0.07263365357008474,0.07941760623294503,0.08636502619752151,0.09342409259131987,0.10055941879130086,0.10773862203516439,0.11491925493319649,0.12205454341628362,0.12910574440442701,0.13603193674135247,0.14279363793432492,0.14935899637559222,0.15570946852517004,0.16181335447936246,0.16762885103400735,0.173124260006202,0.17829724091916832,0.1831511189432459,0.18765732535867224,0.19178045973010424,0.19551170889050407,0.1988826525118693,0.20190401159568497,0.2045540673121609,0.20681524230905796,0.20870794819210392,0.21026302088359902,0.2114967667964161,0.21242566155733492,0.21306897325276766,0.2134508310207639,0.21359444961691176,0.21352410684507403,0.21327313900175215,0.21287762673595637,0.21236917993281396,0.21176483107703076,0.21107994182676465,0.2103553513915033,0.20964197690631653,0.20898003086195743,0.20838200889764386,0.2078561046908611,0.20742962847379134,0.20713781255545777,0.2070110158801638,0.20706227604760802,0.20729057025460068,0.20770740296987256,0.20833469858190728,0.20919240719229137,0.21028613583584965,0.21160697720268976,0.21315255384565343,0.21492329761332915,0.21691605156584734,0.21912024822991577,0.22152402050843722,0.22411430541774366,0.22687572994614266,0.2297910745032134,0.23284210463716895,0.2360063766575454,0.23926121888688895,0.24258422892642037,0.24594990472006842 +0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0. diff --git a/projects/pic/pic_test_cases.py b/projects/pic/pic_test_cases.py new file mode 100644 index 0000000..ae9b55b --- /dev/null +++ b/projects/pic/pic_test_cases.py @@ -0,0 +1,327 @@ +import sys +import os + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname( __file__ ), '../..'))) + +import pickle +from typing import Tuple, List +import numpy as np + +from epde.interface.prepared_tokens import CustomTokens, PhasedSine1DTokens, ConstantToken +from epde.interface.equation_translator import translate_equation +from epde.interface.interface import EpdeSearch + +from epde.operators.common.coeff_calculation import LinRegBasedCoeffsEquation +from epde.operators.common.sparsity import LASSOSparsity + +from epde.operators.utils.operator_mappers import map_operator_between_levels +import epde.operators.common.fitness as fitness +from epde.operators.utils.template import CompoundOperator + +# Introduce noise levels, test with complex setups +# np.random.seed(0) + +def load_pretrained_PINN(ann_filename): + try: + with open(ann_filename, 'rb') as data_input_file: + data_nn = pickle.load(data_input_file) + except FileNotFoundError: + print('No model located, proceeding with ann approx. 
retraining.')
+        data_nn = None
+    return data_nn
+
+
+def compare_equations(correct_symbolic: str, eq_incorrect_symbolic: str,
+                      search_obj: EpdeSearch, all_vars: List[str] = ['u',]) -> bool:
+    metaparams = {('sparsity', var): {'optimizable': False, 'value': 1E-6} for var in all_vars}
+
+    correct_eq = translate_equation(correct_symbolic, search_obj.pool, all_vars = all_vars)
+    for var in all_vars:
+        correct_eq.vals[var].main_var_to_explain = var
+        correct_eq.vals[var].metaparameters = metaparams
+    print(correct_eq.text_form)
+
+    incorrect_eq = translate_equation(eq_incorrect_symbolic, search_obj.pool, all_vars = all_vars)
+    for var in all_vars:
+        incorrect_eq.vals[var].main_var_to_explain = var
+        incorrect_eq.vals[var].metaparameters = metaparams
+    print(incorrect_eq.text_form)
+
+    # NB: `fit_operator` is the module-level fitness operator configured in __main__.
+    fit_operator.apply(correct_eq, {})
+    fit_operator.apply(incorrect_eq, {})
+
+    # The comparison passes only if the correct equation attains a lower (better)
+    # fitness value than the corrupted one for every variable.
+    comparison = [correct_eq.vals[var].fitness_value < incorrect_eq.vals[var].fitness_value
+                  for var in all_vars]
+    print(comparison)
+    return all(comparison)
+
+
+def prepare_suboperators(fitness_operator: CompoundOperator) -> CompoundOperator:
+    sparsity = LASSOSparsity()
+    coeff_calc = LinRegBasedCoeffsEquation()
+
+    sparsity = map_operator_between_levels(sparsity, 'gene level', 'chromosome level')
+    coeff_calc = map_operator_between_levels(coeff_calc, 'gene level', 'chromosome level')
+
+    fitness_operator.set_suboperators({'sparsity' : sparsity,
+                                       'coeff_calc' : coeff_calc})
+    return fitness_operator
+
+
+def noise_data(data, noise_level):
+    # Add zero-mean Gaussian noise with std equal to `noise_level` percent of the data's std.
+    return noise_level * 0.01 * np.std(data) * np.random.normal(size=data.shape) + data
+
+
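+# Hypothetical sanity check, not called by the tests below: `noise_data` adds
+# zero-mean Gaussian noise whose standard deviation is `noise_level` percent of
+# the data's standard deviation, so the measured residual should be close to the
+# requested level. The helper name and defaults are illustrative only.
+def _noise_level_sanity_check(noise_level: int = 10, size: int = 10000) -> float:
+    clean = np.sin(np.linspace(0., 10., size))
+    noised = noise_data(clean, noise_level)
+    # Residual std as a percentage of the signal std; expected to be near noise_level.
+    return 100. * np.std(noised - clean) / np.std(clean)
+
+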
+def ODE_test(operator: CompoundOperator, foldername: str, noise_level: int = 0):
+    # Test scenario to evaluate performance on a simple 2nd-order ODE:
+    # x'' + sin(2t) x' + 4 x = 1.5 t, written as $g_{1} x'' + g_{2} x' + g_{3} x = g_{4}$ with
+    # g1 = lambda t: 1.
+    # g2 = lambda t: np.sin(2*t)
+    # g3 = lambda t: 4.
+    # g4 = lambda t: 1.5*t
+
+    eq_ode_symbolic = '-1.0 * d^2u/dx0^2{power: 1.0} + 1.5 * x_0{power: 1.0, dim: 0.0} + -4.0 * u{power: 1.0} + -0.0 \
+                       = du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0, dim: 0.0}'
+    eq_ode_incorrect = '1.0 * du/dx0{power: 1.0} + 3.5 * x_0{power: 1.0, dim: 0.0} * u{power: 1.0} + -1.2 \
+                       = du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0, dim: 0.0}'
+
+    step = 0.05
+    steps_num = 320
+    t = np.arange(start = 0., stop = step * steps_num, step = step)
+    data = np.load(os.path.join(foldername, 'ode_data.npy'))
+    noised_data = noise_data(data, noise_level)
+    data_nn = load_pretrained_PINN(os.path.join(foldername, 'ode_ann_pretrained.pickle'))
+
+    dimensionality = 0
+
+    from epde import TrigonometricTokens, GridTokens
+    trig_tokens = TrigonometricTokens(freq = (2 - 1e-8, 2 + 1e-8),
+                                      dimensionality = dimensionality)
+    grid_tokens = GridTokens(['x_0',], dimensionality = dimensionality, max_power = 2)
+
+    epde_search_obj = EpdeSearch(use_solver = True, dimensionality = dimensionality, boundary = 10,
+                                 coordinate_tensors = (t,), verbose_params = {'show_iter_idx' : True},
+                                 device = 'cpu')
+
+    epde_search_obj.set_preprocessor(default_preprocessor_type='FD',
+                                     preprocessor_kwargs={})
+
+    epde_search_obj.create_pool(data=noised_data, variable_names=['u',], max_deriv_order=(2,),
+                                additional_tokens = [grid_tokens, trig_tokens], data_nn = data_nn)
+
+    assert compare_equations(eq_ode_symbolic, eq_ode_incorrect, epde_search_obj)
+
+
+def VdP_test(operator: CompoundOperator, foldername: str, noise_level: int = 0):
+    # Test scenario to evaluate performance on the Van der Pol oscillator:
+    # u'' + E (u^2 - 1) u' + u = 0, where E is a positive constant (here E = 0.2)
+    eq_vdp_symbolic = '-0.2 * u{power: 2.0} * du/dx0{power: 1.0} + 0.2 * du/dx0{power: 1.0} + -1.0 * u{power: 1.0} + -0.0 \
+                       = d^2u/dx0^2{power: 1.0}'
+    eq_vdp_incorrect = '-1.0 * d^2u/dx0^2{power: 1.0} + 1.5 * x_0{power: 1.0, dim: 0.0} + -4.0 * u{power: 1.0} + -0.0 \
+                       = du/dx0{power: 1.0} * sin{power: 1.0, freq: 2.0, dim: 0.0}'
+
+    step = 0.05
+    steps_num = 320
+    t = np.arange(start = 0., stop = step * steps_num, step = step)
+    data = np.load(os.path.join(foldername, 'vdp_data.npy'))
+    noised_data = noise_data(data, noise_level)
+    data_nn = load_pretrained_PINN(os.path.join(foldername, 'vdp_ann_pretrained.pickle'))
+
+    dimensionality = 0
+
+    from epde import TrigonometricTokens, GridTokens
+    trig_tokens = TrigonometricTokens(freq = (2 - 1e-8, 2 + 1e-8),
+                                      dimensionality = dimensionality)
+    grid_tokens = GridTokens(['x_0',], dimensionality = dimensionality, max_power = 2)
+
+    epde_search_obj = EpdeSearch(use_solver = True, dimensionality = dimensionality, boundary = 10,
+                                 coordinate_tensors = (t,), verbose_params = {'show_iter_idx' : True},
+                                 device = 'cpu')
+
+    epde_search_obj.set_preprocessor(default_preprocessor_type='FD',
+                                     preprocessor_kwargs={})
+
+    epde_search_obj.create_pool(data=noised_data, variable_names=['u',], max_deriv_order=(2,),
+                                additional_tokens = [grid_tokens, trig_tokens], data_nn = data_nn)
+
+    assert compare_equations(eq_vdp_symbolic, eq_vdp_incorrect, epde_search_obj)
+
+def ac_data(filename: str):
+    t = np.linspace(0., 1., 51)
+    x = np.linspace(-1., 0.984375, 128)
+    data = np.load(filename)
+    grids = np.meshgrid(t, x, indexing = 'ij')
+    return grids, data
+
+
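+# Note on the grid layout: with indexing='ij', `ac_data` returns meshgrid arrays
+# of shape (len(t), len(x)) = (51, 128), i.e. time-major; the Allen-Cahn data
+# tensor loaded from 'ac_data.npy' is expected to share this orientation.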
+def AC_test(operator: CompoundOperator, foldername: str, noise_level: int = 0):
+    # Test scenario to evaluate performance on the Allen-Cahn equation
+    eq_ac_symbolic = '0.0001 * d^2u/dx1^2{power: 1.0} + -5.0 * u{power: 3.0} + 5.0 * u{power: 1.0} + 0.0 = du/dx0{power: 1.0}'
+    eq_ac_incorrect = '-1.0 * d^2u/dx0^2{power: 1.0} + 1.5 * u{power: 1.0} + -0.0 = du/dx0{power: 1.0}'
+
+    grid, data = ac_data(os.path.join(foldername, 'ac_data.npy'))
+    noised_data = noise_data(data, noise_level)
+    data_nn = load_pretrained_PINN(os.path.join(foldername, 'ac_ann_pretrained.pickle'))
+
+    print('Shapes:', data.shape, grid[0].shape)
+    dimensionality = 1
+
+    epde_search_obj = EpdeSearch(use_solver = True, dimensionality = dimensionality, boundary = 10,
+                                 coordinate_tensors = (grid[0], grid[1]), verbose_params = {'show_iter_idx' : True},
+                                 device = 'cpu')
+
+    epde_search_obj.set_preprocessor(default_preprocessor_type='FD',
+                                     preprocessor_kwargs={})
+
+    epde_search_obj.create_pool(data=noised_data, variable_names=['u',], max_deriv_order=(2, 2),
+                                additional_tokens = [], data_nn = data_nn)
+
+    assert compare_equations(eq_ac_symbolic, eq_ac_incorrect, epde_search_obj)
+
+
+def wave_data(filename):
+    shape = 80
+
+    data = np.loadtxt(filename, delimiter=',').T
+    t = np.linspace(0, 1, shape + 1)
+    x = np.linspace(0, 1, shape + 1)
+    grids = np.stack(np.meshgrid(t, x, indexing='ij'), axis=2)
+    return grids, data
+
+def wave_test(operator: CompoundOperator, foldername: str, noise_level: int = 0):
+    # eq_wave_symbolic = '1. * d^2u/dx1^2{power: 1} + 0. = d^2u/dx0^2{power: 1}'
+    eq_wave_symbolic = '0.04 * d^2u/dx1^2{power: 1} + 0. = d^2u/dx0^2{power: 1}'
+    eq_wave_incorrect = '1. * d^2u/dx1^2{power: 1} * du/dx1{power: 1} + 2.3 * d^2u/dx0^2{power: 1} + 0. = du/dx0{power: 1}'
+
+    grid, data = wave_data(os.path.join(foldername, 'wave_sln_80.csv'))
+    noised_data = noise_data(data, noise_level)
+    data_nn = load_pretrained_PINN(os.path.join(foldername, 'ann_pretrained.pickle'))
+
+    dimensionality = data.ndim - 1
+
+    epde_search_obj = EpdeSearch(use_solver=True, dimensionality=dimensionality, boundary=10,
+                                 coordinate_tensors=(grid[..., 0], grid[..., 1]),
+                                 verbose_params={'show_iter_idx': True},
+                                 device='cpu')
+
+    epde_search_obj.set_preprocessor(default_preprocessor_type='FD',
+                                     preprocessor_kwargs={})
+
+    epde_search_obj.create_pool(data=noised_data, variable_names=['u', ], max_deriv_order=(2, 2),
+                                additional_tokens=[], data_nn=data_nn)
+
+    assert compare_equations(eq_wave_symbolic, eq_wave_incorrect, epde_search_obj)
+
+
+def kdv_data(filename, shape = 80):
+    data = np.loadtxt(filename, delimiter = ',').T
+
+    t = np.linspace(0, 1, shape+1)
+    x = np.linspace(0, 1, shape+1)
+    grids = np.meshgrid(t, x, indexing = 'ij')
+    return grids, data
+
+
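+# The symbolic strings used by `translate_equation` throughout this file follow
+# a simple grammar: '*' joins factors into a term, '+' joins terms, '=' splits
+# the left- and right-hand sides, and each factor carries its parameters in
+# braces. As an illustrative sketch, the dispersionless analogue u_t = -6 u u_x
+# would read:
+# '-6.0 * u{power: 1.0} * du/dx1{power: 1.0} + 0.0 = du/dx0{power: 1.0}'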
+def KdV_test(operator: CompoundOperator, foldername: str, noise_level: int = 0):
+    # Test scenario to evaluate performance on the Korteweg-de Vries equation
+    eq_kdv_symbolic = '-6.0 * du/dx1{power: 1.0} * u{power: 1.0} + -1.0 * d^3u/dx1^3{power: 1.0} + \
+                       1.0 * sin{power: 1, freq: 1.0, dim: 1} * cos{power: 1, freq: 1.0, dim: 1} + \
+                       0.0 = du/dx0{power: 1.0}'
+
+    eq_kdv_incorrect = '0.04 * d^2u/dx1^2{power: 1} + 0. = d^2u/dx0^2{power: 1}'
+
+    grid, data = kdv_data(os.path.join(foldername, 'data.csv'))
+    noised_data = noise_data(data, noise_level)
+    data_nn = load_pretrained_PINN(os.path.join(foldername, 'kdv_ann_pretrained.pickle'))
+
+    print('Shapes:', data.shape, grid[0].shape)
+    dimensionality = 1
+
+    from epde import TrigonometricTokens
+    trig_tokens = TrigonometricTokens(freq=(1 - 1e-8, 1 + 1e-8),
+                                      dimensionality=dimensionality)
+
+    epde_search_obj = EpdeSearch(use_solver = True, dimensionality = dimensionality, boundary = 10,
+                                 coordinate_tensors = (grid[0], grid[1]), verbose_params = {'show_iter_idx' : True},
+                                 device = 'cpu')
+
+    epde_search_obj.set_preprocessor(default_preprocessor_type='FD',
+                                     preprocessor_kwargs={})
+
+    epde_search_obj.create_pool(data=noised_data, variable_names=['u',], max_deriv_order=(2, 3),
+                                additional_tokens = [trig_tokens,], data_nn = data_nn)
+
+    assert compare_equations(eq_kdv_symbolic, eq_kdv_incorrect, epde_search_obj)
+
+# TODO: implement tests on data corrupted by additive Gaussian noise & perform full-scale equation search experiments.
+def epde_discovery(foldername):
+    step = 0.05
+    steps_num = 320
+    t = np.arange(start=0., stop=step * steps_num, step=step)
+    data = np.load(os.path.join(foldername, 'ode_data.npy'))
+    # noised_data = noise_data(data, noise_level)
+    data_nn = load_pretrained_PINN(os.path.join(foldername, 'ode_ann_pretrained.pickle'))
+
+    dimensionality = 0
+
+    from epde import TrigonometricTokens, GridTokens
+    trig_tokens = TrigonometricTokens(freq=(2 - 1e-8, 2 + 1e-8),
+                                      dimensionality=dimensionality)
+    grid_tokens = GridTokens(['x_0', ], dimensionality=dimensionality, max_power=2)
+
+    epde_search_obj = EpdeSearch(use_solver=True, dimensionality=dimensionality, boundary=10,
+                                 coordinate_tensors=(t,), verbose_params={'show_iter_idx': True},
+                                 device='cuda')
+
+    epde_search_obj.set_preprocessor(default_preprocessor_type='FD',
+                                     preprocessor_kwargs={})
+
+    popsize = 8
+    epde_search_obj.set_moeadd_params(population_size=popsize, training_epochs=55)
+
+    factors_max_number = {'factors_num': [1, 2], 'probas': [0.65, 0.35]}
+
+    epde_search_obj.fit(data=[data, ], variable_names=['u', ], max_deriv_order=(2,),
+                        equation_terms_max_number=5, data_fun_pow=1,
+                        additional_tokens=[trig_tokens, grid_tokens],
+                        equation_factors_max_number=factors_max_number,
+                        eq_sparsity_interval=(1e-12, 1e-4))
+
+    epde_search_obj.equations(only_print=True, num=1)
+
+    # syss = epde_search_obj.equation_search_results(only_print = False, num = 1)
+    '''
+    Knowing the structure of the original ODE, we extract the equation with complexity 5.
+
+    In other cases, call epde_search_obj.equation_search_results(only_print = True),
+    which presents the Pareto frontier of optimal equations.
+    '''
+    system = epde_search_obj.get_equations_by_complexity(5)[0]
+    return epde_search_obj, system
+
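+# Sketch of how the complexity-based extraction above generalizes (the complexity
+# values are illustrative, and the candidates are assumed to expose `text_form`
+# as the translated equations elsewhere in this file do):
+#     for compl in (4, 5, 6):
+#         for candidate in epde_search_obj.get_equations_by_complexity(compl):
+#             print(compl, candidate.text_form)
+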
+if __name__ == "__main__":
+    # Operator = fitness.SolverBasedFitness # Solver-based alternative to the PIC operator below.
+    Operator = fitness.PIC
+    operator_params = {"penalty_coeff" : 0.2, "pinn_loss_mult" : 1e4}
+    fit_operator = prepare_suboperators(Operator(list(operator_params.keys())))
+    fit_operator.params = operator_params
+
+    directory = os.path.dirname(os.path.realpath(__file__))
+    ode_folder_name = os.path.join(directory, 'data', 'ode')
+    ODE_test(fit_operator, ode_folder_name, 0)
+    # epde_discovery(ode_folder_name)
+
+    vdp_folder_name = os.path.join(directory, 'data', 'vdp')
+    # VdP_test(fit_operator, vdp_folder_name, 75)
+
+    ac_folder_name = os.path.join(directory, 'data', 'ac')
+    # AC_test(fit_operator, ac_folder_name, 25)
+
+    wave_folder_name = os.path.join(directory, 'data', 'wave')
+    # wave_test(fit_operator, wave_folder_name, 200)
+
+    kdv_folder_name = os.path.join(directory, 'data', 'kdv')
+    # KdV_test(fit_operator, kdv_folder_name, 25)
\ No newline at end of file
diff --git a/projects/pic/pic_wave_equation_test.py b/projects/pic/pic_wave_equation_test.py
new file mode 100644
index 0000000..31bae80
--- /dev/null
+++ b/projects/pic/pic_wave_equation_test.py
@@ -0,0 +1,114 @@
+import sys
+import os
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
+
+import pickle
+from typing import Tuple
+import numpy as np
+
+from epde.interface.prepared_tokens import CustomTokens, PhasedSine1DTokens, ConstantToken
+from epde.interface.equation_translator import translate_equation
+from epde.interface.interface import EpdeSearch
+
+from epde.operators.common.coeff_calculation import LinRegBasedCoeffsEquation
+from epde.operators.common.sparsity import LASSOSparsity
+
+from epde.operators.utils.operator_mappers import map_operator_between_levels
+import epde.operators.common.fitness as fitness
+from epde.operators.utils.template import CompoundOperator
+
+# Unfinished general-purpose loader, kept for reference:
+# def load_data(data_filename: str, grid_filename : str = None) -> Tuple[np.ndarray]:
+#     if '.npy' in data_filename:
+#         data = np.load(data_filename)
+#     elif ('.csv' in data_filename) or ('.txt' in data_filename):
+#         data = np.loadtxt(data_filename)
+
+#     if grid_filename is not None:
+#         if '.npy' in grid_filename:
+#             grid = np.load(grid_filename)
+#         elif ('.csv' in grid_filename) or ('.txt' in grid_filename):
+#             grid = np.loadtxt(grid_filename)
+
+#     return (data, grid)
+
+def load_data(filename):
+    shape = 80
+
+    data = np.loadtxt(filename, delimiter = ',').T
+    t = np.linspace(0, 1, shape+1)
+    x = np.linspace(0, 1, shape+1)
+    grids = np.stack(np.meshgrid(t, x, indexing = 'ij'), axis = 2)
+    return grids, data
+
+
+def load_pretrained_PINN(ann_filename):
+    try:
+        with open(ann_filename, 'rb') as data_input_file:
+            data_nn = pickle.load(data_input_file)
+    except FileNotFoundError:
+        print('No model located, proceeding with ann approx. retraining.')
+        data_nn = None
+    return data_nn
+
+def prepare_suboperators(fitness_operator: CompoundOperator) -> CompoundOperator:
+    sparsity = LASSOSparsity()
+    coeff_calc = LinRegBasedCoeffsEquation()
+
+    sparsity = map_operator_between_levels(sparsity, 'gene level', 'chromosome level')
+    coeff_calc = map_operator_between_levels(coeff_calc, 'gene level', 'chromosome level')
+
+    fitness_operator.set_suboperators({'sparsity' : sparsity,
+                                       'coeff_calc' : coeff_calc})
+    return fitness_operator
+
+
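+# As in pic_test_cases.py, the LASSO sparsity and linear-regression coefficient
+# suboperators are defined at the gene (single-equation) level; mapping them to
+# the chromosome level lets the compound fitness operator apply them to whole
+# candidate systems (level semantics inferred here from the level names).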
+if __name__ == "__main__":
+    # Operator = fitness.SolverBasedFitness # Solver-based alternative to the PIC operator below.
+    Operator = fitness.PIC
+    operator_params = {"penalty_coeff" : 0.2, "pinn_loss_mult" : 1e4}
+    fit_operator = prepare_suboperators(Operator(list(operator_params.keys())))
+    fit_operator.params = operator_params
+
+    directory = os.path.dirname(os.path.realpath(__file__))
+    pinn_file_name = os.path.join(directory, 'data/wave/ann_pretrained.pickle') # If necessary, replace by another filename
+
+    shape = 80
+    # NB: the solution CSV is expected next to this script, unlike the pickled ANN above.
+    data_file_name = os.path.join(os.path.dirname(__file__), f'wave_sln_{shape}.csv')
+
+    grid, data = load_data(data_file_name)
+    data_nn = load_pretrained_PINN(pinn_file_name)
+
+    dimensionality = data.ndim - 1
+
+    epde_search_obj = EpdeSearch(use_solver = True, dimensionality = dimensionality, boundary = 10,
+                                 coordinate_tensors = (grid[..., 0], grid[..., 1]), verbose_params = {'show_iter_idx' : True},
+                                 device = 'cpu')
+
+    epde_search_obj.set_preprocessor(default_preprocessor_type='FD',
+                                     preprocessor_kwargs={})
+
+    epde_search_obj.create_pool(data=data, variable_names=['u',], max_deriv_order=(2, 2),
+                                additional_tokens = [], data_nn = data_nn)
+
+    # eq_wave_symbolic = '1. * d^2u/dx1^2{power: 1} + 0. = d^2u/dx0^2{power: 1}'
+    eq_wave_symbolic = '0.04 * d^2u/dx1^2{power: 1} + 0. = d^2u/dx0^2{power: 1}'
+    wave_eq = translate_equation(eq_wave_symbolic, epde_search_obj.pool, all_vars = ['u',])
+    wave_eq.vals['u'].main_var_to_explain = 'u'
+    wave_eq.vals['u'].metaparameters = {('sparsity', 'u'): {'optimizable': False, 'value': 0.5}}
+    print(wave_eq.text_form)
+
+    eq_incorrect_symbolic = '1. * d^2u/dx1^2{power: 1} * du/dx1{power: 1} + 2.3 * d^2u/dx0^2{power: 1} + 0. = du/dx0{power: 1}'
+    incorrect_eq = translate_equation(eq_incorrect_symbolic, epde_search_obj.pool, all_vars = ['u',])
+    incorrect_eq.vals['u'].main_var_to_explain = 'u'
+    incorrect_eq.vals['u'].metaparameters = {('sparsity', 'u'): {'optimizable': False, 'value': 0.5}}
+    print(incorrect_eq.text_form)
+
+    fit_operator.apply(wave_eq, {})
+    fit_operator.apply(incorrect_eq, {})
+
+    # Lower fitness is better here: the true wave equation must score below the corrupted one.
+    print(wave_eq.vals['u'].fitness_value)
+    print(incorrect_eq.vals['u'].fitness_value)
+    assert wave_eq.vals['u'].fitness_value < incorrect_eq.vals['u'].fitness_value
\ No newline at end of file